import Human from '../dist/human.esm.js';
import draw from './draw.js';
import Menu from './menu.js';
import GLBench from '../assets/gl-bench.js';

const userConfig = {}; // add any user configuration overrides
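// example override for illustration only (keys shown are ones this demo already references via human.config below):
// const userConfig = { backend: 'wasm', face: { iris: { enabled: false } } };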
const human = new Human(userConfig);

// ui options
const ui = {
  baseColor: 'rgba(173, 216, 230, 0.3)', // 'lightblue' with light alpha channel
  baseBackground: 'rgba(50, 50, 50, 1)', // 'grey'
  baseLabel: 'rgba(173, 216, 230, 0.9)', // 'lightblue' with dark alpha channel
  baseFontProto: 'small-caps {size} "Segoe UI"',
  baseLineWidth: 12,
  baseLineHeightProto: 2,
  crop: true,
  columns: 2,
  busy: false,
  facing: true,
  useWorker: false,
  worker: 'demo/worker.js',
  samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg'],
  compare: '../assets/sample-me.jpg',
  drawBoxes: true,
  drawPoints: false,
  drawPolygons: true,
  fillPolygons: false,
  useDepth: true,
  console: true,
  maxFPSframes: 10,
  modelsPreload: true,
  modelsWarmup: true,
  menuWidth: 0,
  menuHeight: 0,
  camera: {},
  fps: [],
  buffered: false,
  bufferedFPSTarget: 24,
  drawThread: null,
  framesDraw: 0,
  framesDetect: 0,
  bench: false,
};

// global variables
let menu;
let menuFX;
let worker;
let bench;
let sample;
let lastDetectedResult = {};

// helper function: translates json to human readable string
function str(...msg) {
  if (!Array.isArray(msg)) return msg;
  let line = '';
  for (const entry of msg) {
    if (typeof entry === 'object') line += JSON.stringify(entry).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ');
    else line += entry;
  }
  return line;
}

// helper function: wrapper around console output
const log = (...msg) => {
  // eslint-disable-next-line no-console
  if (ui.console) console.log(...msg);
};

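// helper function: displays status messages in the html 'status' element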
const status = (msg) => {
  document.getElementById('status').innerText = msg;
};

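// helper function: calculates face embedding similarity between the current detection and the reference sample image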
async function calcSimmilariry(faces) {
  if (!faces || !faces[0] || (faces[0].embedding?.length !== 192)) return;
  const current = faces[0].embedding;
  const original = (sample && sample.face && sample.face[0] && sample.face[0].embedding) ? sample.face[0].embedding : null;
  if (original && original.length === 192) {
    const simmilarity = human.simmilarity(current, original);
    document.getElementById('simmilarity').innerText = `similarity: ${Math.trunc(1000 * simmilarity) / 10}%`;
  }
}

// draws processed results and starts processing of a next frame
async function drawResults(input) {
  const result = lastDetectedResult;
  const canvas = document.getElementById('canvas');

  // update fps data
  // const elapsed = performance.now() - timeStamp;
  if (result.performance && result.performance.total) ui.fps.push(1000 / result.performance.total);
  if (ui.fps.length > ui.maxFPSframes) ui.fps.shift();
  // enable for continuous performance monitoring
  // console.log(result.performance);

  // draw fps chart
  await menu.updateChart('FPS', ui.fps);
  // get updated canvas
  if (ui.buffered || !result.canvas) result.canvas = await human.image(input, userConfig);

  // draw image from video
  const ctx = canvas.getContext('2d');
  ctx.fillStyle = ui.baseBackground;
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  if (result.canvas) {
    if (result.canvas.width !== canvas.width) canvas.width = result.canvas.width;
    if (result.canvas.height !== canvas.height) canvas.height = result.canvas.height;
    ctx.drawImage(result.canvas, 0, 0, result.canvas.width, result.canvas.height, 0, 0, result.canvas.width, result.canvas.height);
  } else {
    ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
  }

  // draw all results
  await draw.face(result.face, canvas, ui, human.facemesh.triangulation);
  await draw.body(result.body, canvas, ui);
  await draw.hand(result.hand, canvas, ui);
  await draw.gesture(result.gesture, canvas, ui);
  await calcSimmilariry(result.face);

  // update log
  const engine = human.tf.engine();
  const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : '';
  const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} | tensors: ${engine.state.numTensors.toLocaleString()}`;
  const processing = result.canvas ? `processing: ${result.canvas.width} x ${result.canvas.height}` : '';
  const avg = Math.trunc(10 * ui.fps.reduce((a, b) => a + b, 0) / ui.fps.length) / 10;
  const warning = (ui.fps.length > 5) && (avg < 5) ? '<font color="lightcoral">warning: your performance is low: try switching to higher performance backend, lowering resolution or disabling some models</font>' : '';
  document.getElementById('log').innerHTML = `
    video: ${ui.camera.name} | facing: ${ui.camera.facing} | resolution: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
    backend: ${human.tf.getBackend()} | ${memory}<br>
    performance: ${str(result.performance)} FPS: ${avg}<br>
    ${warning}
  `;

  ui.framesDraw++;
  ui.lastFrame = performance.now();
  // if buffered, immediate loop but limit frame rate although it's going to run slower as JS is singlethreaded
  if (ui.buffered && !ui.drawThread) ui.drawThread = setInterval(() => drawResults(input), 1000 / ui.bufferedFPSTarget);
  // stop buffering
  if (!ui.buffered && ui.drawThread) {
    clearTimeout(ui.drawThread);
    ui.drawThread = null;
  }
}

// setup webcam
async function setupCamera() {
  if (ui.busy) return null;
  ui.busy = true;
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  const output = document.getElementById('log');
  const live = video.srcObject ? ((video.srcObject.getVideoTracks()[0].readyState === 'live') && (video.readyState > 2) && (!video.paused)) : false;
  let msg = '';
  status('setting up camera');
  // setup webcam. note that navigator.mediaDevices requires that page is accessed via https
  if (!navigator.mediaDevices) {
    msg = 'camera access not supported';
    output.innerText += `\n${msg}`;
    log(msg);
    status(msg);
    return null;
  }
  let stream;
  const constraints = {
    audio: false,
    video: {
      facingMode: ui.facing ? 'user' : 'environment',
      resizeMode: ui.crop ? 'crop-and-scale' : 'none',
      width: { ideal: window.innerWidth },
      height: { ideal: window.innerHeight },
    },
  };
  try {
    // if (window.innerWidth > window.innerHeight) constraints.video.width = { ideal: window.innerWidth };
    // else constraints.video.height = { ideal: window.innerHeight };
    stream = await navigator.mediaDevices.getUserMedia(constraints);
  } catch (err) {
    if (err.name === 'PermissionDeniedError') msg = 'camera permission denied';
    else if (err.name === 'SourceUnavailableError') msg = 'camera not available';
    else msg = 'camera error';
    output.innerText += `\n${msg}`;
    status(msg);
    log(err);
  }
  if (stream) video.srcObject = stream;
  else return null;
  const track = stream.getVideoTracks()[0];
  const settings = track.getSettings();
  // log('camera constraints:', constraints, 'window:', { width: window.innerWidth, height: window.innerHeight }, 'settings:', settings, 'track:', track);
  ui.camera = { name: track.label?.toLowerCase(), width: settings.width, height: settings.height, facing: settings.facingMode === 'user' ? 'front' : 'back' };
  return new Promise((resolve) => {
    video.onloadeddata = async () => {
      video.width = video.videoWidth;
      video.height = video.videoHeight;
      canvas.width = video.width;
      canvas.height = video.height;
      canvas.style.width = canvas.width > canvas.height ? '100vw' : '';
      canvas.style.height = canvas.width > canvas.height ? '' : '100vh';
      ui.menuWidth.input.setAttribute('value', video.width);
      ui.menuHeight.input.setAttribute('value', video.height);
      // silly font resizing for paint-on-canvas since viewport can be zoomed
      const size = 14 + (6 * canvas.width / window.innerWidth);
      ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`);
      if (live) video.play();
      ui.busy = false;
      // do once more because onresize events can be delayed or skipped
      // if (video.width > window.innerWidth) await setupCamera();
      status('');
      resolve(video);
    };
  });
}

// wrapper for worker.postmessage that creates worker if one does not exist
function webWorker(input, image, canvas, timestamp) {
  if (!worker) {
    // create new webworker and add event handler only once
    log('creating worker thread');
    worker = new Worker(ui.worker, { type: 'module' });
    // after receiving message from webworker, parse&draw results and send new frame for processing
    worker.addEventListener('message', (msg) => {
      if (ui.bench) bench.end();
      if (ui.bench) bench.nextFrame(timestamp);
      lastDetectedResult = msg.data.result;
      ui.framesDetect++;
      if (!ui.drawThread) drawResults(input);
      // eslint-disable-next-line no-use-before-define
      requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
    });
  }
  // pass image data as arraybuffer to worker by reference to avoid copy
  if (ui.bench) bench.begin();
  worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height }, [image.data.buffer]);
}

// main processing function when input is webcam, can use direct invocation or web worker
function runHumanDetect(input, canvas, timestamp) {
  // if live video
  const live = input.srcObject && (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused);
  if (!live && input.srcObject) {
    // stop ui refresh
    if (ui.drawThread) clearTimeout(ui.drawThread);
    ui.drawThread = null;
    // if we want to continue and camera not ready, retry in 0.5sec, else just give up
    if (input.paused) log('camera paused');
    else if ((input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState <= 2)) setTimeout(() => runHumanDetect(input, canvas), 500);
    else log(`camera not ready: track state: ${input.srcObject?.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
    clearTimeout(ui.drawThread);
    ui.drawThread = null;
    log('frame statistics: drawn:', ui.framesDraw, 'detected:', ui.framesDetect);
    return;
  }
  status('');
  if (ui.useWorker) {
    // get image data from video as we cannot send html objects to webworker
    const offscreen = new OffscreenCanvas(canvas.width, canvas.height);
    const ctx = offscreen.getContext('2d');
    ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
    const data = ctx.getImageData(0, 0, canvas.width, canvas.height);
    // perform detection in worker
    webWorker(input, data, canvas, timestamp);
  } else {
    if (ui.bench) bench.begin();
    human.detect(input, userConfig).then((result) => {
      if (ui.bench) bench.end();
      if (ui.bench) bench.nextFrame(timestamp);
      if (result.error) log(result.error);
      else {
        lastDetectedResult = result;
        if (!ui.drawThread) drawResults(input);
        ui.framesDetect++;
        requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
      }
    });
  }
}

// main processing function when input is image, can use direct invocation or web worker
async function processImage(input) {
  return new Promise((resolve) => {
    const image = new Image();
    image.onload = async () => {
      log('Processing image:', image.src);
      const canvas = document.getElementById('canvas');
      image.width = image.naturalWidth;
      image.height = image.naturalHeight;
      canvas.width = human.config.filter.width && human.config.filter.width > 0 ? human.config.filter.width : image.naturalWidth;
      canvas.height = human.config.filter.height && human.config.filter.height > 0 ? human.config.filter.height : image.naturalHeight;
      const result = await human.detect(image, userConfig);
      lastDetectedResult = result;
      await drawResults(image);
      const thumb = document.createElement('canvas');
      thumb.className = 'thumbnail';
      thumb.width = window.innerWidth / (ui.columns + 0.1);
      thumb.height = canvas.height / (window.innerWidth / thumb.width);
      const ctx = thumb.getContext('2d');
      ctx.drawImage(canvas, 0, 0, canvas.width, canvas.height, 0, 0, thumb.width, thumb.height);
      document.getElementById('samples-container').appendChild(thumb);
      image.src = '';
      resolve(true);
    };
    image.src = input;
  });
}

// just initialize everything and call main function
async function detectVideo() {
  human.config.videoOptimized = true;
  document.getElementById('samples-container').style.display = 'none';
  document.getElementById('canvas').style.display = 'block';
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  ui.baseLineHeight = ui.baseLineHeightProto;
  if ((video.srcObject !== null) && !video.paused) {
    document.getElementById('play').style.display = 'block';
    status('paused');
    video.pause();
  } else {
    await setupCamera();
    document.getElementById('play').style.display = 'none';
    status('');
    video.play();
  }
  runHumanDetect(video, canvas);
}

// just initialize everything and call main function
async function detectSampleImages() {
  document.getElementById('play').style.display = 'none';
  human.config.videoOptimized = false;
  const size = 12 + Math.trunc(12 * ui.columns * window.innerWidth / document.body.clientWidth);
  ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`);
  ui.baseLineHeight = ui.baseLineHeightProto * ui.columns;
  document.getElementById('canvas').style.display = 'none';
  document.getElementById('samples-container').style.display = 'block';
  log('Running detection of sample images');
  status('processing images');
  document.getElementById('samples-container').innerHTML = '';
  for (const image of ui.samples) await processImage(image);
  status('');
}

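// builds both menus: main controls for models and parameters plus a secondary menu for ui and image processing options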
function setupMenu() {
  document.getElementById('compare-container').style.display = human.config.face.embedding.enabled ? 'block' : 'none';
  menu = new Menu(document.body, '', { top: '1rem', right: '1rem' });
  const btn = menu.addButton('start video', 'pause video', () => detectVideo());
  menu.addButton('process images', 'process images', () => detectSampleImages());
  document.getElementById('play').addEventListener('click', () => btn.click());

  menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
  menu.addList('backend', ['cpu', 'webgl', 'wasm'], human.config.backend, (val) => human.config.backend = val);
  menu.addBool('async operations', human.config, 'async', (val) => human.config.async = val);
  menu.addBool('enable profiler', human.config, 'profile', (val) => human.config.profile = val);
  menu.addBool('memory shield', human.config, 'deallocate', (val) => human.config.deallocate = val);
  menu.addBool('use web worker', ui, 'useWorker');

  menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
  menu.addLabel('enabled models');
  menu.addBool('face detect', human.config.face, 'enabled');
  menu.addBool('face mesh', human.config.face.mesh, 'enabled');
  menu.addBool('face iris', human.config.face.iris, 'enabled');
  menu.addBool('face age', human.config.face.age, 'enabled');
  menu.addBool('face gender', human.config.face.gender, 'enabled');
  menu.addBool('face emotion', human.config.face.emotion, 'enabled');
  menu.addBool('body pose', human.config.body, 'enabled');
  menu.addBool('hand pose', human.config.hand, 'enabled');
  menu.addBool('gesture analysis', human.config.gesture, 'enabled');

  menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
  menu.addLabel('model parameters');
  menu.addRange('max objects', human.config.face.detector, 'maxFaces', 1, 50, 1, (val) => {
    human.config.face.detector.maxFaces = parseInt(val);
    human.config.body.maxDetections = parseInt(val);
    human.config.hand.maxHands = parseInt(val);
  });
  menu.addRange('skip frames', human.config.face.detector, 'skipFrames', 0, 50, 1, (val) => {
    human.config.face.detector.skipFrames = parseInt(val);
    human.config.face.emotion.skipFrames = parseInt(val);
    human.config.face.age.skipFrames = parseInt(val);
    human.config.hand.skipFrames = parseInt(val);
  });
  menu.addRange('min confidence', human.config.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {
    human.config.face.detector.minConfidence = parseFloat(val);
    human.config.face.gender.minConfidence = parseFloat(val);
    human.config.face.emotion.minConfidence = parseFloat(val);
    human.config.hand.minConfidence = parseFloat(val);
  });
  menu.addRange('score threshold', human.config.face.detector, 'scoreThreshold', 0.1, 1.0, 0.05, (val) => {
    human.config.face.detector.scoreThreshold = parseFloat(val);
    human.config.hand.scoreThreshold = parseFloat(val);
    human.config.body.scoreThreshold = parseFloat(val);
  });
  menu.addRange('overlap', human.config.face.detector, 'iouThreshold', 0.1, 1.0, 0.05, (val) => {
    human.config.face.detector.iouThreshold = parseFloat(val);
    human.config.hand.iouThreshold = parseFloat(val);
  });

  menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
  menu.addChart('FPS', 'FPS');

  menuFX = new Menu(document.body, '', { top: '1rem', right: '18rem' });
  menuFX.addLabel('ui options');
  menuFX.addBool('buffered output', ui, 'buffered', (val) => ui.buffered = val);
  menuFX.addBool('crop & scale', ui, 'crop', () => setupCamera());
  menuFX.addBool('camera front/back', ui, 'facing', () => setupCamera());
  menuFX.addBool('use 3D depth', ui, 'useDepth');
  menuFX.addBool('draw boxes', ui, 'drawBoxes');
  menuFX.addBool('draw polygons', ui, 'drawPolygons');
  menuFX.addBool('fill polygons', ui, 'fillPolygons');
  menuFX.addBool('draw points', ui, 'drawPoints');

  menuFX.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
  menuFX.addLabel('image processing');
  menuFX.addBool('enabled', human.config.filter, 'enabled');
  ui.menuWidth = menuFX.addRange('image width', human.config.filter, 'width', 0, 3840, 10, (val) => human.config.filter.width = parseInt(val));
  ui.menuHeight = menuFX.addRange('image height', human.config.filter, 'height', 0, 2160, 10, (val) => human.config.filter.height = parseInt(val));
  menuFX.addRange('brightness', human.config.filter, 'brightness', -1.0, 1.0, 0.05, (val) => human.config.filter.brightness = parseFloat(val));
  menuFX.addRange('contrast', human.config.filter, 'contrast', -1.0, 1.0, 0.05, (val) => human.config.filter.contrast = parseFloat(val));
  menuFX.addRange('sharpness', human.config.filter, 'sharpness', 0, 1.0, 0.05, (val) => human.config.filter.sharpness = parseFloat(val));
  menuFX.addRange('blur', human.config.filter, 'blur', 0, 20, 1, (val) => human.config.filter.blur = parseInt(val));
  menuFX.addRange('saturation', human.config.filter, 'saturation', -1.0, 1.0, 0.05, (val) => human.config.filter.saturation = parseFloat(val));
  menuFX.addRange('hue', human.config.filter, 'hue', 0, 360, 5, (val) => human.config.filter.hue = parseInt(val));
  menuFX.addRange('pixelate', human.config.filter, 'pixelate', 0, 32, 1, (val) => human.config.filter.pixelate = parseInt(val));
  menuFX.addBool('negative', human.config.filter, 'negative');
  menuFX.addBool('sepia', human.config.filter, 'sepia');
  menuFX.addBool('vintage', human.config.filter, 'vintage');
  menuFX.addBool('kodachrome', human.config.filter, 'kodachrome');
  menuFX.addBool('technicolor', human.config.filter, 'technicolor');
  menuFX.addBool('polaroid', human.config.filter, 'polaroid');
}

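// initializes gl-bench performance monitor: reuses the webgl context from the tfjs backend when available, otherwise falls back to a dedicated canvas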
async function setupMonitor() {
  let gl = human.tf.engine().backend.gpgpu;
  if (!gl) gl = document.getElementById('bench-canvas').getContext('webgl2');
  if (!bench) {
    bench = new GLBench(gl, {
      trackGPU: true,
      chartHz: 20,
      chartLen: 50,
    });
  }
  /*
  function update(now) {
    bench.nextFrame(now);
    requestAnimationFrame(update);
  }
  requestAnimationFrame(update);
  */
  // class MathBackendWebGL extends tf.KernelBackend property gpgpu is gl context
}

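// main entry point: sets up menus and monitor, optionally pre-loads and pre-warms models, then enables the play button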
async function main() {
  log('Human: demo starting ...');
  setupMenu();
  setupMonitor();
  document.getElementById('log').innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`;
  // human.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
  // this is not required, just pre-loads all models
  if (ui.modelsPreload) {
    status('loading');
    await human.load(userConfig);
  }
  // this is not required, just pre-warms all models for faster initial inference
  if (ui.modelsWarmup) {
    status('initializing');
    sample = await human.warmup(userConfig, document.getElementById('sample-image'));
  }
  status('human: ready');
  document.getElementById('loader').style.display = 'none';
  document.getElementById('play').style.display = 'block';
}

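// start on page load and reinitialize camera whenever the window is resized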
window.onload = main;
window.onresize = setupCamera;