/**
 * Human demo for browsers
 *
 * @description Main demo app that exposes all Human functionality
 *
 * @params Optional URL parameters:
 *   image=<imagePath:string>: perform detection on specific image and finish
 *   worker=<true|false>: use WebWorkers
 *   backend=<webgl|wasm|cpu>: use specific TF backend for operations
 *   preload=<true|false>: pre-load all configured models
 *   warmup=<true|false>: warmup all configured models
 *
 * @example <https://wyse:10031/?backend=wasm&worker=true&image="/assets/sample-me.jpg">
 *
 * @configuration
 *   userConfig={}: contains all model configuration used by human
 *   drawOptions={}: contains all draw variables used by human.draw
 *   ui={}: contains all variables exposed in the UI
 */

// test url <https://human.local/?worker=false&async=false&bench=false&draw=true&warmup=full&backend=humangl>

// @ts-nocheck // typescript checks disabled as this is pure javascript

import Human from '../dist/human.esm.js'; // equivalent of @vladmandic/human
import Menu from './helpers/menu.js';
import GLBench from './helpers/gl-bench.js';
import webRTC from './helpers/webrtc.js';
import jsonView from './helpers/jsonview.js';

let human;

let userConfig = {
  warmup: 'none',
  backend: 'humangl',
  debug: true,
  filter: { enabled: false },
  /*
  wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.9.0/dist/',
  async: false,
  cacheSensitivity: 0.75,
  filter: {
    enabled: false,
    flip: false,
  },
  face: { enabled: false,
    detector: { return: false, rotation: true },
    mesh: { enabled: true },
    iris: { enabled: true },
    description: { enabled: false },
    emotion: { enabled: false },
  },
  object: { enabled: false },
  gesture: { enabled: true },
  hand: { enabled: true },
  body: { enabled: false },
  // body: { enabled: true, modelPath: 'movenet-multipose.json' },
  // body: { enabled: true, modelPath: 'posenet.json' },
  segmentation: { enabled: false },
  */
};

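// note: any option not listed in userConfig falls back to Human's built-in defaults (the demo later merges
// them via userConfig = { ...human.config, ...userConfig }); the commented block above shows per-model
// overrides that can be re-enabled as needed. a hypothetical runtime override would look like:
// userConfig = { ...userConfig, face: { enabled: true, mesh: { enabled: true } } };
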
const drawOptions = {
  bufferedOutput: true, // makes draw functions interpolate results between each detection for smoother movement
  drawBoxes: true,
  drawGaze: true,
  drawLabels: true,
  drawPolygons: true,
  drawPoints: false,
};

// ui options
const ui = {
  // configurable items
  console: true, // log messages to browser console
  crop: false, // video mode crop to size or leave full frame
  facing: true, // camera facing front or back
  baseBackground: 'rgba(50, 50, 50, 1)', // 'grey'
  columns: 2, // when processing sample images create this many columns
  useWorker: true, // use web workers for processing
  worker: 'index-worker.js',
  maxFPSframes: 10, // keep fps history for how many frames
  modelsPreload: false, // preload human models on startup
  modelsWarmup: false, // warmup human models on startup
  buffered: true, // should output be buffered between frames
  interpolated: true, // should output be interpolated for smoothness between frames
  iconSize: '48px', // ui icon sizes
  autoPlay: false, // start webcam & detection on load

  // internal variables
  busy: false, // internal camera busy flag
  menuWidth: 0, // internal
  menuHeight: 0, // internal
  camera: {}, // internal, holds details of webcam
  detectFPS: [], // internal, holds fps values for detection performance
  drawFPS: [], // internal, holds fps values for draw performance
  drawWarmup: false, // debug only, should warmup image processing be displayed on startup
  drawThread: null, // internal, perform draw operations in a separate thread
  detectThread: null, // internal, perform detect operations in a separate thread
  hintsThread: null, // internal, draw random hints
  framesDraw: 0, // internal, statistics on frames drawn
  framesDetect: 0, // internal, statistics on frames detected
  bench: true, // show gl fps benchmark window
  results: false, // show results tree
  lastFrame: 0, // time of last frame processing
  viewportSet: false, // internal, has custom viewport been set
  background: null, // holds instance of segmentation background image

  // webrtc
  useWebRTC: false, // use webrtc as camera source instead of local webcam
  webRTCServer: 'http://localhost:8002',
  webRTCStream: 'reowhite',

  // sample images
  compare: '../samples/ai-face.jpg', // base image for face compare
  samples: [],
};

const pwa = {
  enabled: true,
  cacheName: 'Human',
  scriptFile: 'index-pwa.js',
  cacheModels: true,
  cacheWASM: true,
  cacheOther: false,
};

// hints
const hints = [
  'for optimal performance disable unused modules',
  'with modern gpu best backend is webgl otherwise select wasm backend',
  'you can process images by dragging and dropping them in browser window',
  'video input can be webcam or any other video source',
  'check out other demos such as face-matching and face-3d',
  'you can edit input image or video on-the-fly using filters',
  'library status messages are logged in browser console',
];

// global variables
const menu = {};
let worker;
let bench;
let lastDetectedResult = {};

// helper function: translates json to human readable string
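// e.g. str({ total: 12.3 }, 'ms') returns 'total:12.3ms'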
function str(...msg) {
  if (!Array.isArray(msg)) return msg;
  let line = '';
  for (const entry of msg) {
    if (typeof entry === 'object') line += JSON.stringify(entry).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ');
    else line += entry;
  }
  return line;
}

// helper function: wrapper around console output
function log(...msg) {
  const dt = new Date();
  const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
  // eslint-disable-next-line no-console
  if (ui.console) console.log(ts, ...msg);
}

function status(msg) {
  const div = document.getElementById('status');
  if (div && msg && msg.length > 0) {
    log('status', msg);
    document.getElementById('play').style.display = 'none';
    document.getElementById('loader').style.display = 'block';
    div.innerText = msg;
  } else {
    const video = document.getElementById('video');
    const playing = (video.srcObject !== null) && !video.paused;
    document.getElementById('play').style.display = playing ? 'none' : 'block';
    document.getElementById('loader').style.display = 'none';
    div.innerText = '';
  }
}

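// start/pause the main video element and keep the start button label and status overlay in sync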
async function videoPlay() {
  document.getElementById('btnStartText').innerHTML = 'pause video';
  await document.getElementById('video').play();
  // status();
}

async function videoPause() {
  document.getElementById('btnStartText').innerHTML = 'start video';
  await document.getElementById('video').pause();
  status('paused');
  document.getElementById('play').style.display = 'block';
  document.getElementById('loader').style.display = 'none';
}

const compare = { enabled: false, original: null };
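// compare the current face descriptor against the stored baseline; human.similarity() returns a normalized score in the 0..1 range which is shown as a percentage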
async function calcSimmilarity(result) {
  document.getElementById('compare-container').style.display = compare.enabled ? 'block' : 'none';
  if (!compare.enabled) return;
  if (!result || !result.face || !result.face[0] || !result.face[0].embedding) return;
  if (!(result.face.length > 0) || (result.face[0].embedding.length <= 64)) return;
  if (!compare.original) {
    compare.original = result;
    log('setting face compare baseline:', result.face[0]);
    if (result.face[0].tensor) {
      const enhanced = human.enhance(result.face[0]);
      if (enhanced) {
        const c = document.getElementById('orig');
        const squeeze = human.tf.squeeze(enhanced);
        const norm = human.tf.div(squeeze, 255);
        human.tf.browser.toPixels(norm, c);
        human.tf.dispose(enhanced);
        human.tf.dispose(squeeze);
        human.tf.dispose(norm);
      }
    } else {
      document.getElementById('compare-canvas').getContext('2d').drawImage(compare.original.canvas, 0, 0, 200, 200);
    }
  }
  const similarity = human.similarity(compare.original.face[0].embedding, result.face[0].embedding);
  document.getElementById('similarity').innerText = `similarity: ${Math.trunc(1000 * similarity) / 10}%`;
}

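// input is considered live when video data is ready (or the camera track reports 'live') and playback is not paused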
const isLive = (input) => {
  const videoLive = input.readyState > 2;
  const cameraLive = input.srcObject?.getVideoTracks()[0].readyState === 'live';
  const live = (videoLive || cameraLive) && !input.paused;
  return live;
};

// draws processed results and starts processing of the next frame
let lastDraw = performance.now();
async function drawResults(input) {
  const result = lastDetectedResult;
  const canvas = document.getElementById('canvas');

  // update draw fps data
  ui.drawFPS.push(1000 / (performance.now() - lastDraw));
  if (ui.drawFPS.length > ui.maxFPSframes) ui.drawFPS.shift();
  lastDraw = performance.now();

  // draw fps chart
  await menu.process.updateChart('FPS', ui.detectFPS);

  if (userConfig.segmentation.enabled && ui.buffered) { // refresh segmentation if using buffered output
    result.canvas = await human.segmentation(input, ui.background, userConfig);
  } else if (!result.canvas || ui.buffered) { // refresh with input if using buffered output or if missing canvas
    const image = await human.image(input);
    result.canvas = image.canvas;
    human.tf.dispose(image.tensor);
  }

  // draw image from video
  const ctx = canvas.getContext('2d');
  ctx.fillStyle = ui.baseBackground;
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  if (result.canvas) {
    if (result.canvas.width !== canvas.width) canvas.width = result.canvas.width;
    if (result.canvas.height !== canvas.height) canvas.height = result.canvas.height;
    ctx.drawImage(result.canvas, 0, 0, result.canvas.width, result.canvas.height, 0, 0, result.canvas.width, result.canvas.height);
  } else {
    ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
  }

  // draw all results using interpolated results
  if (ui.interpolated) {
    const interpolated = human.next(result);
    human.draw.all(canvas, interpolated, drawOptions);
  } else {
    human.draw.all(canvas, result, drawOptions);
  }

  // show tree with results
  if (ui.results) {
    const div = document.getElementById('results');
    div.innerHTML = '';
    jsonView(result, div, 'Results', ['canvas', 'timestamp']);
  }

  /* alternatively use individual functions
  human.draw.face(canvas, result.face);
  human.draw.body(canvas, result.body);
  human.draw.hand(canvas, result.hand);
  human.draw.object(canvas, result.object);
  human.draw.gesture(canvas, result.gesture);
  */

  // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
  const person = result.persons; // explicitly invoke person getter
  await calcSimmilarity(result);

  // update log
  const engine = human.tf.engine();
  const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : '';
  const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} | tensors: ${engine.state.numTensors.toLocaleString()}`;
  const processing = result.canvas ? `processing: ${result.canvas.width} x ${result.canvas.height}` : '';
  const avgDetect = ui.detectFPS.length > 0 ? Math.trunc(10 * ui.detectFPS.reduce((a, b) => a + b, 0) / ui.detectFPS.length) / 10 : 0;
  const avgDraw = ui.drawFPS.length > 0 ? Math.trunc(10 * ui.drawFPS.reduce((a, b) => a + b, 0) / ui.drawFPS.length) / 10 : 0;
  const warning = (ui.detectFPS.length > 5) && (avgDetect < 2) ? '<font color="lightcoral">warning: your performance is low: try switching to higher performance backend, lowering resolution or disabling some models</font>' : '';
  const fps = avgDetect > 0 ? `FPS process: ${avgDetect} refresh: ${avgDraw}` : '';
  const backend = engine.state.numTensors > 0 ? `${human.tf.getBackend()} | ${memory}` : `${result.backend} | tensors: ${result.tensors} in worker`;
  document.getElementById('log').innerHTML = `
    video: ${ui.camera.name} | facing: ${ui.camera.facing} | screen: ${window.innerWidth} x ${window.innerHeight} camera: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
    backend: ${backend}<br>
    performance: ${str(lastDetectedResult.performance)}ms ${fps}<br>
    ${warning}<br>
  `;
  ui.framesDraw++;
  ui.lastFrame = performance.now();
  // if buffered, immediate loop but limit frame rate although it's going to run slower as JS is single-threaded
  if (ui.buffered) {
    if (isLive(input)) {
      ui.drawThread = requestAnimationFrame(() => drawResults(input));
    } else {
      cancelAnimationFrame(ui.drawThread);
      ui.drawThread = null;
    }
  } else {
    if (ui.drawThread) {
      log('stopping buffered refresh');
      cancelAnimationFrame(ui.drawThread);
      ui.drawThread = null;
    }
  }
}

// setup webcam
let initialCameraAccess = true;
async function setupCamera() {
  if (ui.busy) return null;
  ui.busy = true;
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  const output = document.getElementById('log');
  if (ui.useWebRTC) {
    status('setting up webrtc connection');
    try {
      video.onloadeddata = () => ui.camera = { name: ui.webRTCStream, width: video.videoWidth, height: video.videoHeight, facing: 'default' };
      await webRTC(ui.webRTCServer, ui.webRTCStream, video);
    } catch (err) {
      log(err);
    } finally {
      // status();
    }
    return '';
  }
  const live = video.srcObject ? ((video.srcObject.getVideoTracks()[0].readyState === 'live') && (video.readyState > 2) && (!video.paused)) : false;
  let msg = '';
  status('setting up camera');
  // setup webcam. note that navigator.mediaDevices requires that page is accessed via https
  if (!navigator.mediaDevices) {
    msg = 'camera access not supported';
    output.innerText += `\n${msg}`;
    log(msg);
    status(msg);
    ui.busy = false;
    return msg;
  }
  let stream;
  const constraints = {
    audio: false,
    video: {
      facingMode: ui.facing ? 'user' : 'environment',
      resizeMode: ui.crop ? 'crop-and-scale' : 'none',
      width: { ideal: document.body.clientWidth },
      // height: { ideal: document.body.clientHeight }, // not set as we're using aspectRatio to get height instead
      aspectRatio: document.body.clientWidth / document.body.clientHeight,
      // deviceId: 'xxxx' // if you have multiple webcams, specify one to use explicitly
    },
  };
  // enumerate devices for diag purposes
  if (initialCameraAccess) {
    navigator.mediaDevices.enumerateDevices().then((devices) => log('enumerated input devices:', devices));
    log('camera constraints', constraints);
  }
  try {
    stream = await navigator.mediaDevices.getUserMedia(constraints);
  } catch (err) {
    if (err.name === 'PermissionDeniedError' || err.name === 'NotAllowedError') msg = 'camera permission denied';
    else if (err.name === 'SourceUnavailableError') msg = 'camera not available';
    else msg = `camera error: ${err.message || err}`;
    output.innerText += `\n${msg}`;
    status(msg);
    log('camera error:', err);
    ui.busy = false;
    return msg;
  }
  const tracks = stream.getVideoTracks();
  if (tracks && tracks.length >= 1) {
    if (initialCameraAccess) log('enumerated viable tracks:', tracks);
  } else {
    ui.busy = false;
    return 'no camera track';
  }
  const track = stream.getVideoTracks()[0];
  const settings = track.getSettings();
  if (initialCameraAccess) log('selected video source:', track, settings); // log('selected camera:', track.label, 'id:', settings.deviceId);
  ui.camera = { name: track.label.toLowerCase(), width: video.videoWidth, height: video.videoHeight, facing: settings.facingMode === 'user' ? 'front' : 'back' };
  initialCameraAccess = false;
  if (!stream) return 'camera stream empty';
  const ready = new Promise((resolve) => (video.onloadeddata = () => resolve(true)));
  video.srcObject = stream;
  await ready;
  if (settings.width > settings.height) canvas.style.width = '100vw';
  else canvas.style.height = '100vh';
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  ui.menuWidth.input.setAttribute('value', video.videoWidth);
  ui.menuHeight.input.setAttribute('value', video.videoHeight);
  if (live || ui.autoPlay) await videoPlay();
  // eslint-disable-next-line no-use-before-define
  if ((live || ui.autoPlay) && !ui.detectThread) runHumanDetect(video, canvas);
  return 'camera stream ready';
}

function initPerfMonitor() {
  if (!bench) {
    const gl = null;
    // const gl = human.tf.engine().backend.gpgpu.gl;
    // if (!gl) log('bench cannot get tensorflow webgl context');
    bench = new GLBench(gl, {
      trackGPU: false, // this is really slow
      chartHz: 20,
      chartLen: 20,
    });
    bench.begin();
  }
}

// wrapper for worker.postMessage that creates worker if one does not exist
function webWorker(input, image, canvas, timestamp) {
  if (!worker) {
    // create new webworker and add event handler only once
    log('creating worker thread');
    // load Human using IIFE script as Chrome Mobile does not support Modules as Workers
    // worker = new Worker(ui.worker, { type: 'module' });
    worker = new Worker(ui.worker);
    // after receiving message from webworker, parse&draw results and send new frame for processing
    worker.addEventListener('message', (msg) => {
      status();
      if (msg.data.result.performance && msg.data.result.performance.total) ui.detectFPS.push(1000 / msg.data.result.performance.total);
      if (ui.detectFPS.length > ui.maxFPSframes) ui.detectFPS.shift();
      if (ui.bench) {
        if (!bench) initPerfMonitor();
        bench.nextFrame(timestamp);
      }
      if (document.getElementById('gl-bench')) document.getElementById('gl-bench').style.display = ui.bench ? 'block' : 'none';
      lastDetectedResult = msg.data.result;
      if (msg.data.image) {
        lastDetectedResult.canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(msg.data.width, msg.data.height) : document.createElement('canvas');
        lastDetectedResult.canvas.width = msg.data.width;
        lastDetectedResult.canvas.height = msg.data.height;
        const ctx = lastDetectedResult.canvas.getContext('2d');
        const imageData = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
        ctx.putImageData(imageData, 0, 0);
      }
      ui.framesDetect++;
      if (!ui.drawThread) drawResults(input);
      if (isLive(input)) {
        // eslint-disable-next-line no-use-before-define
        ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
      }
    });
  }
  // pass image data as arraybuffer to worker by reference to avoid copy
  worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, userConfig }, [image.data.buffer]);
}

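/*
  the worker script referenced by ui.worker (index-worker.js) is expected to mirror the message protocol
  used above; a minimal sketch, assuming Human is also loaded inside the worker (the actual worker script
  ships with the demo and may differ):

  let human;
  onmessage = async (msg) => {
    if (!human) human = new Human(msg.data.userConfig);
    const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
    const result = await human.detect(image, msg.data.userConfig);
    // post back plain result data (tensor fields may need to be stripped before posting);
    // width/height let the main thread rebuild a canvas if an image buffer is included
    postMessage({ result, width: msg.data.width, height: msg.data.height });
  };
*/
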
// main processing function when input is webcam, can use direct invocation or web worker
function runHumanDetect(input, canvas, timestamp) {
  // if live video
  if (!isLive(input)) {
    // stop ui refresh
    // if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
    if (ui.detectThread) cancelAnimationFrame(ui.detectThread);
    if (input.paused) log('video paused');
    // if we want to continue and camera not ready, retry in 0.5sec, else just give up
    // else if (cameraLive && (input.readyState <= 2)) setTimeout(() => runHumanDetect(input, canvas), 500);
    else log(`video not ready: track state: ${input.srcObject ? input.srcObject.getVideoTracks()[0].readyState : 'unknown'} stream state: ${input.readyState}`);
    log('frame statistics: process:', ui.framesDetect, 'refresh:', ui.framesDraw);
    log('memory', human.tf.engine().memory());
    return;
  }
  if (ui.hintsThread) clearInterval(ui.hintsThread);
  if (ui.useWorker) {
    // get image data from video as we cannot send html objects to webworker
    const offscreen = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(canvas.width, canvas.height) : document.createElement('canvas');
    offscreen.width = canvas.width;
    offscreen.height = canvas.height;
    const ctx = offscreen.getContext('2d');
    ctx.drawImage(input, 0, 0, canvas.width, canvas.height);
    const data = ctx.getImageData(0, 0, canvas.width, canvas.height);
    // perform detection in worker
    webWorker(input, data, canvas, timestamp);
  } else {
    human.detect(input, userConfig).then((result) => {
      status();
      /*
      setTimeout(async () => { // simulate gl context lost 2sec after initial detection
        const ext = human.gl && human.gl.gl ? human.gl.gl.getExtension('WEBGL_lose_context') : {};
        if (ext && ext.loseContext) {
          log('simulate context lost:', human.env.webgl, human.gl, ext);
          human.gl.gl.getExtension('WEBGL_lose_context').loseContext();
          await videoPause();
          status('Exception: WebGL');
        }
      }, 2000);
      */
      if (result.performance && result.performance.total) ui.detectFPS.push(1000 / result.performance.total);
      if (ui.detectFPS.length > ui.maxFPSframes) ui.detectFPS.shift();
      if (ui.bench) {
        if (!bench) initPerfMonitor();
        bench.nextFrame(timestamp);
      }
      if (document.getElementById('gl-bench')) document.getElementById('gl-bench').style.display = ui.bench ? 'block' : 'none';
      if (result.error) {
        log(result.error);
        document.getElementById('log').innerText += `\nHuman error: ${result.error}`;
      } else {
        lastDetectedResult = result;
        if (!ui.drawThread) drawResults(input);
        ui.framesDetect++;
        ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
      }
    });
  }
}

// main processing function when input is image, can use direct invocation or web worker
async function processImage(input, title) {
  return new Promise((resolve) => {
    const image = new Image();
    image.onerror = async () => status('image loading error');
    image.onload = async () => {
      if (ui.hintsThread) clearInterval(ui.hintsThread);
      ui.interpolated = false; // stop interpolating results if input is image
      ui.buffered = false; // stop buffering result if input is image
      status(`processing image: ${title}`);
      const canvas = document.getElementById('canvas');
      image.width = image.naturalWidth;
      image.height = image.naturalHeight;
      canvas.width = userConfig.filter.width && userConfig.filter.width > 0 ? userConfig.filter.width : image.naturalWidth;
      canvas.height = userConfig.filter.height && userConfig.filter.height > 0 ? userConfig.filter.height : image.naturalHeight;
      const origCacheSensitiry = userConfig.cacheSensitivity;
      userConfig.cacheSensitivity = 0;
      const result = await human.detect(image, userConfig);
      userConfig.cacheSensitivity = origCacheSensitiry;
      lastDetectedResult = result;
      await drawResults(image);
      const thumb = document.createElement('canvas');
      thumb.className = 'thumbnail';
      thumb.width = ui.columns > 1 ? window.innerWidth / (ui.columns + 0.1) : window.innerWidth - 14;
      thumb.height = thumb.width * canvas.height / canvas.width;
      if (result.face && result.face.length > 0) {
        thumb.title = result.face.map((a, i) => `#${i} face: ${Math.trunc(100 * a.faceScore)}% box: ${Math.trunc(100 * a.boxScore)}% age: ${Math.trunc(a.age)} gender: ${Math.trunc(100 * a.genderScore)}% ${a.gender}`).join(' | ');
      } else {
        thumb.title = 'no face detected';
      }
      thumb.addEventListener('click', (evt) => {
        const stdWidth = ui.columns > 1 ? window.innerWidth / (ui.columns + 0.1) : window.innerWidth - 14;
        // zoom in/out on click
        if (evt.target.style.width === `${stdWidth}px`) {
          evt.target.style.width = '';
          evt.target.style.height = `${document.getElementById('log').offsetTop - document.getElementById('media').offsetTop}px`;
        } else {
          evt.target.style.width = `${stdWidth}px`;
          evt.target.style.height = '';
        }
        // copy to clipboard on click
        if (typeof ClipboardItem !== 'undefined' && navigator.clipboard) {
          evt.target.toBlob((blob) => {
            // eslint-disable-next-line no-undef
            const item = new ClipboardItem({ 'image/png': blob });
            navigator.clipboard.write([item]);
            log('copied image to clipboard');
          });
        }
      });
      const ctx = thumb.getContext('2d');
      ctx.drawImage(canvas, 0, 0, canvas.width, canvas.height, 0, 0, thumb.width, thumb.height);
      const prev = document.getElementsByClassName('thumbnail');
      if (prev && prev.length > 0) document.getElementById('samples-container').insertBefore(thumb, prev[0]);
      else document.getElementById('samples-container').appendChild(thumb);
      // finish up
      status();
      document.getElementById('play').style.display = 'none';
      document.getElementById('loader').style.display = 'none';
      if (ui.detectThread) cancelAnimationFrame(ui.detectThread);
      if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
      log('processed image:', title);
      resolve(true);
    };
    image.src = input;
  });
}

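// process a video file: create a detached video element, start playback and run detection on it once it can play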
async function processVideo(input, title) {
  status(`processing video: ${title}`);
  const video = document.createElement('video');
  const canvas = document.getElementById('canvas');
  video.id = 'video-file';
  video.controls = true;
  video.loop = true;
  // video.onerror = async () => status(`video loading error: ${video.error.message}`);
  video.addEventListener('error', () => status(`video loading error: ${video.error.message}`));
  video.addEventListener('canplay', async () => {
    for (const m of Object.values(menu)) m.hide();
    document.getElementById('samples-container').style.display = 'none';
    canvas.style.display = 'block';
    await videoPlay();
    if (!ui.detectThread) runHumanDetect(video, canvas);
  });
  video.src = input;
}

// just initialize everything and call main function
async function detectVideo() {
  document.getElementById('samples-container').style.display = 'none';
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  canvas.style.display = 'block';
  cancelAnimationFrame(ui.detectThread);
  if ((video.srcObject !== null) && !video.paused) {
    await videoPause();
    // if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
  } else {
    const cameraError = await setupCamera();
    if (!cameraError) {
      status('starting detection');
      for (const m of Object.values(menu)) m.hide();
      await videoPlay();
      runHumanDetect(video, canvas);
    } else {
      status(cameraError);
    }
  }
}

// just initialize everything and call main function
async function detectSampleImages() {
  document.getElementById('play').style.display = 'none';
  document.getElementById('canvas').style.display = 'none';
  document.getElementById('samples-container').style.display = 'block';
  log('running detection of sample images');
  status('processing images');
  document.getElementById('samples-container').innerHTML = '';
  for (const m of Object.values(menu)) m.hide();
  for (const image of ui.samples) await processImage(image, image);
}

function setupMenu() {
  const x = [`${document.getElementById('btnDisplay').offsetLeft}px`, `${document.getElementById('btnImage').offsetLeft}px`, `${document.getElementById('btnProcess').offsetLeft}px`, `${document.getElementById('btnModel').offsetLeft}px`];
  const top = `${document.getElementById('menubar').clientHeight}px`;

  menu.display = new Menu(document.body, '', { top, left: x[0] });
  menu.display.addBool('results tree', ui, 'results', (val) => {
    ui.results = val;
    document.getElementById('results').style.display = ui.results ? 'block' : 'none';
  });
  menu.display.addBool('perf monitor', ui, 'bench', (val) => ui.bench = val);
  menu.display.addBool('buffer output', ui, 'buffered', (val) => ui.buffered = val);
  menu.display.addBool('crop & scale', ui, 'crop', (val) => {
    ui.crop = val;
    setupCamera();
  });
  menu.display.addBool('camera facing', ui, 'facing', (val) => {
    ui.facing = val;
    setupCamera();
  });
  menu.display.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.display.addBool('use depth', human.draw.options, 'useDepth');
  menu.display.addBool('use curves', human.draw.options, 'useCurves');
  menu.display.addBool('print labels', human.draw.options, 'drawLabels');
  menu.display.addBool('draw points', human.draw.options, 'drawPoints');
  menu.display.addBool('draw boxes', human.draw.options, 'drawBoxes');
  menu.display.addBool('draw polygons', human.draw.options, 'drawPolygons');
  menu.display.addBool('fill polygons', human.draw.options, 'fillPolygons');

  menu.image = new Menu(document.body, '', { top, left: x[1] });
  menu.image.addBool('enabled', userConfig.filter, 'enabled', (val) => userConfig.filter.enabled = val);
  ui.menuWidth = menu.image.addRange('image width', userConfig.filter, 'width', 0, 3840, 10, (val) => userConfig.filter.width = parseInt(val));
  ui.menuHeight = menu.image.addRange('image height', userConfig.filter, 'height', 0, 2160, 10, (val) => userConfig.filter.height = parseInt(val));
  menu.image.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.image.addRange('brightness', userConfig.filter, 'brightness', -1.0, 1.0, 0.05, (val) => userConfig.filter.brightness = parseFloat(val));
  menu.image.addRange('contrast', userConfig.filter, 'contrast', -1.0, 1.0, 0.05, (val) => userConfig.filter.contrast = parseFloat(val));
  menu.image.addRange('sharpness', userConfig.filter, 'sharpness', 0, 1.0, 0.05, (val) => userConfig.filter.sharpness = parseFloat(val));
  menu.image.addRange('blur', userConfig.filter, 'blur', 0, 20, 1, (val) => userConfig.filter.blur = parseInt(val));
  menu.image.addRange('saturation', userConfig.filter, 'saturation', -1.0, 1.0, 0.05, (val) => userConfig.filter.saturation = parseFloat(val));
  menu.image.addRange('hue', userConfig.filter, 'hue', 0, 360, 5, (val) => userConfig.filter.hue = parseInt(val));
  menu.image.addRange('pixelate', userConfig.filter, 'pixelate', 0, 32, 1, (val) => userConfig.filter.pixelate = parseInt(val));
  menu.image.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.image.addBool('negative', userConfig.filter, 'negative', (val) => userConfig.filter.negative = val);
  menu.image.addBool('sepia', userConfig.filter, 'sepia', (val) => userConfig.filter.sepia = val);
  menu.image.addBool('vintage', userConfig.filter, 'vintage', (val) => userConfig.filter.vintage = val);
  menu.image.addBool('kodachrome', userConfig.filter, 'kodachrome', (val) => userConfig.filter.kodachrome = val);
  menu.image.addBool('technicolor', userConfig.filter, 'technicolor', (val) => userConfig.filter.technicolor = val);
  menu.image.addBool('polaroid', userConfig.filter, 'polaroid', (val) => userConfig.filter.polaroid = val);
  menu.image.addHTML('<input type="file" id="file-input" class="input-file"></input>   input');
  menu.image.addHTML('<input type="file" id="file-background" class="input-file"></input>   background');

  menu.process = new Menu(document.body, '', { top, left: x[2] });
  menu.process.addList('backend', ['cpu', 'webgl', 'wasm', 'humangl'], userConfig.backend, (val) => userConfig.backend = val);
  menu.process.addBool('async operations', userConfig, 'async', (val) => userConfig.async = val);
  menu.process.addBool('use web worker', ui, 'useWorker');
  menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.process.addLabel('model parameters');
  menu.process.addRange('max objects', userConfig.face.detector, 'maxDetected', 1, 50, 1, (val) => {
    userConfig.face.detector.maxDetected = parseInt(val);
    userConfig.body.maxDetected = parseInt(val);
    userConfig.hand.maxDetected = parseInt(val);
  });
  menu.process.addRange('skip frames', userConfig.face.detector, 'skipFrames', 0, 50, 1, (val) => {
    userConfig.face.detector.skipFrames = parseInt(val);
    userConfig.face.emotion.skipFrames = parseInt(val);
    userConfig.hand.skipFrames = parseInt(val);
  });
  menu.process.addRange('min confidence', userConfig.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {
    userConfig.face.detector.minConfidence = parseFloat(val);
    userConfig.face.emotion.minConfidence = parseFloat(val);
    userConfig.hand.minConfidence = parseFloat(val);
  });
  menu.process.addRange('overlap', userConfig.face.detector, 'iouThreshold', 0.1, 1.0, 0.05, (val) => {
    userConfig.face.detector.iouThreshold = parseFloat(val);
    userConfig.hand.iouThreshold = parseFloat(val);
  });
  menu.process.addBool('rotation detection', userConfig.face.detector, 'rotation', (val) => {
    userConfig.face.detector.rotation = val;
    userConfig.hand.rotation = val;
  });
  menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  // menu.process.addButton('process sample images', 'process images', () => detectSampleImages());
  // menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.process.addChart('FPS', 'FPS');

  menu.models = new Menu(document.body, '', { top, left: x[3] });
  menu.models.addBool('face detect', userConfig.face, 'enabled', (val) => userConfig.face.enabled = val);
  menu.models.addBool('face mesh', userConfig.face.mesh, 'enabled', (val) => userConfig.face.mesh.enabled = val);
  menu.models.addBool('face iris', userConfig.face.iris, 'enabled', (val) => userConfig.face.iris.enabled = val);
  menu.models.addBool('face description', userConfig.face.description, 'enabled', (val) => userConfig.face.description.enabled = val);
  menu.models.addBool('face emotion', userConfig.face.emotion, 'enabled', (val) => userConfig.face.emotion.enabled = val);
  menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.models.addBool('body pose', userConfig.body, 'enabled', (val) => userConfig.body.enabled = val);
  menu.models.addBool('hand pose', userConfig.hand, 'enabled', (val) => userConfig.hand.enabled = val);
  menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.models.addBool('gestures', userConfig.gesture, 'enabled', (val) => userConfig.gesture.enabled = val);
  menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.models.addBool('body segmentation', userConfig.segmentation, 'enabled', (val) => userConfig.segmentation.enabled = val);
  menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.models.addBool('object detection', userConfig.object, 'enabled', (val) => userConfig.object.enabled = val);
  menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.models.addBool('face compare', compare, 'enabled', (val) => {
    compare.enabled = val;
    compare.original = null;
  });

  for (const m of Object.values(menu)) m.hide();

  document.getElementById('btnDisplay').addEventListener('click', (evt) => menu.display.toggle(evt));
  document.getElementById('btnImage').addEventListener('click', (evt) => menu.image.toggle(evt));
  document.getElementById('btnProcess').addEventListener('click', (evt) => menu.process.toggle(evt));
  document.getElementById('btnModel').addEventListener('click', (evt) => menu.models.toggle(evt));
  document.getElementById('btnStart').addEventListener('click', () => detectVideo());
  document.getElementById('play').addEventListener('click', () => detectVideo());
}

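// set mobile viewport scale, reposition menus and resize draw fonts, then re-initialize the camera; re-attached as the window resize handler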
async function resize() {
  window.onresize = null;
  // best setting for mobile, ignored for desktop
  // can set dynamic value such as Math.min(1, Math.round(100 * window.innerWidth / 960) / 100);
  const viewportScale = 0.7;
  if (!ui.viewportSet) {
    const viewport = document.querySelector('meta[name=viewport]');
    viewport.setAttribute('content', `width=device-width, shrink-to-fit=yes, minimum-scale=0.2, maximum-scale=2.0, user-scalable=yes, initial-scale=${viewportScale}`);
    ui.viewportSet = true;
  }
  const x = [`${document.getElementById('btnDisplay').offsetLeft}px`, `${document.getElementById('btnImage').offsetLeft}px`, `${document.getElementById('btnProcess').offsetLeft}px`, `${document.getElementById('btnModel').offsetLeft}px`];
  const top = `${document.getElementById('menubar').clientHeight - 3}px`;
  menu.display.menu.style.top = top;
  menu.image.menu.style.top = top;
  menu.process.menu.style.top = top;
  menu.models.menu.style.top = top;
  menu.display.menu.style.left = x[0];
  menu.image.menu.style.left = x[1];
  menu.process.menu.style.left = x[2];
  menu.models.menu.style.left = x[3];

  const fontSize = Math.trunc(10 * (1 - viewportScale)) + 14;
  document.documentElement.style.fontSize = `${fontSize}px`;
  human.draw.options.font = `small-caps ${fontSize}px "Segoe UI"`;
  human.draw.options.lineHeight = fontSize + 2;

  await setupCamera();
  window.onresize = resize;
}

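// display the warmup image and its detection results on the main canvas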
async function drawWarmup(res) {
  const canvas = document.getElementById('canvas');
  canvas.width = res.canvas.width;
  canvas.height = res.canvas.height;
  const ctx = canvas.getContext('2d');
  ctx.drawImage(res.canvas, 0, 0, res.canvas.width, res.canvas.height, 0, 0, canvas.width, canvas.height);
  await human.draw.all(canvas, res, drawOptions);
}

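// read a dropped or selected file as a data url and either run detection on it ('process') or use it as the segmentation background ('background')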
async function processDataURL(f, action) {
  return new Promise((resolve) => {
    const reader = new FileReader();
    reader.onload = async (e) => {
      if (action === 'process') {
        if (e.target.result.startsWith('data:image')) await processImage(e.target.result, f.name);
        if (e.target.result.startsWith('data:video')) await processVideo(e.target.result, f.name);
        document.getElementById('canvas').style.display = 'none';
      }
      if (action === 'background') {
        const image = new Image();
        image.onerror = async () => status('image loading error');
        image.onload = async () => {
          ui.background = image;
          if (document.getElementById('canvas').style.display === 'block') { // replace canvas used for video
            const canvas = document.getElementById('canvas');
            const ctx = canvas.getContext('2d');
            const overlaid = await human.segmentation(canvas, ui.background, userConfig);
            if (overlaid) ctx.drawImage(overlaid, 0, 0);
          } else {
            const canvases = document.getElementById('samples-container').children; // replace loaded images
            for (const canvas of canvases) {
              const ctx = canvas.getContext('2d');
              const overlaid = await human.segmentation(canvas, ui.background, userConfig);
              if (overlaid) ctx.drawImage(overlaid, 0, 0);
            }
          }
        };
        image.src = e.target.result;
      }
      resolve(true);
    };
    reader.readAsDataURL(f);
  });
}

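// wire the background file input: selecting an image enables segmentation and composites the chosen background into the current canvases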
async function runSegmentation() {
  document.getElementById('file-background').onchange = async (evt) => {
    userConfig.segmentation.enabled = true;
    evt.preventDefault();
    if (evt.target.files.length < 2) ui.columns = 1;
    for (const f of evt.target.files) await processDataURL(f, 'background');
  };
}

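// enable processing of files dragged & dropped onto the page or chosen via the file input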
async function dragAndDrop() {
  document.body.addEventListener('dragenter', (evt) => evt.preventDefault());
  document.body.addEventListener('dragleave', (evt) => evt.preventDefault());
  document.body.addEventListener('dragover', (evt) => evt.preventDefault());
  document.body.addEventListener('drop', async (evt) => {
    evt.preventDefault();
    evt.dataTransfer.dropEffect = 'copy';
    if (evt.dataTransfer.files.length < 2) ui.columns = 1;
    for (const f of evt.dataTransfer.files) await processDataURL(f, 'process');
  });
  document.getElementById('file-input').onchange = async (evt) => {
    evt.preventDefault();
    if (evt.target.files.length < 2) ui.columns = 1;
    for (const f of evt.target.files) await processDataURL(f, 'process');
  };
}

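// periodically show a random usage hint in the ui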
async function drawHints() {
  const hint = document.getElementById('hint');
  ui.hintsThread = setInterval(() => {
    const rnd = Math.trunc(Math.random() * hints.length);
    hint.innerText = 'hint: ' + hints[rnd];
    hint.style.opacity = 1;
    setTimeout(() => {
      hint.style.opacity = 0;
    }, 4500);
  }, 5000);
}

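// register the pwa service worker (if enabled) and pass cache configuration to it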
async function pwaRegister() {
  if (!pwa.enabled) return;
  if ('serviceWorker' in navigator) {
    try {
      let found;
      const regs = await navigator.serviceWorker.getRegistrations();
      for (const reg of regs) {
        log('pwa found:', reg.scope);
        if (reg.scope.startsWith(location.origin)) found = reg;
      }
      if (!found) {
        const reg = await navigator.serviceWorker.register(pwa.scriptFile, { scope: location.pathname });
        found = reg;
        log('pwa registered:', reg.scope);
      }
    } catch (err) {
      if (err.name === 'SecurityError') log('pwa: ssl certificate is untrusted');
      else log('pwa error:', err);
    }
    if (navigator.serviceWorker.controller) {
      // update pwa configuration as it doesn't have access to it
      navigator.serviceWorker.controller.postMessage({ key: 'cacheModels', val: pwa.cacheModels });
      navigator.serviceWorker.controller.postMessage({ key: 'cacheWASM', val: pwa.cacheWASM });
      navigator.serviceWorker.controller.postMessage({ key: 'cacheOther', val: pwa.cacheOther });
      log('pwa active:', navigator.serviceWorker.controller.scriptURL);
      const cache = await caches.open(pwa.cacheName);
      if (cache) {
        const content = await cache.matchAll();
        log('pwa cache:', content.length, 'files');
      }
    }
  } else {
    log('pwa inactive');
  }
}

async function main() {
  window.addEventListener('unhandledrejection', (evt) => {
    if (ui.detectThread) cancelAnimationFrame(ui.detectThread);
    if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
    const msg = evt.reason.message || evt.reason || evt;
    // eslint-disable-next-line no-console
    console.error(msg);
    document.getElementById('log').innerHTML = msg;
    status(`exception: ${msg}`);
    evt.preventDefault();
  });

  log('demo starting ...');

  document.documentElement.style.setProperty('--icon-size', ui.iconSize);

  drawHints();

  // sanity check for webworker compatibility
  if (typeof Worker === 'undefined' || typeof OffscreenCanvas === 'undefined') {
    ui.useWorker = false;
    log('webworker functionality is disabled due to missing browser functionality');
  }

  // register PWA ServiceWorker
  await pwaRegister();

  // parse url search params
  const params = new URLSearchParams(location.search);
  log('url options:', params.toString());
  if (params.has('worker')) {
    ui.useWorker = JSON.parse(params.get('worker'));
    log('overriding worker:', ui.useWorker);
  }
  if (params.has('backend')) {
    userConfig.backend = params.get('backend'); // string
    log('overriding backend:', userConfig.backend);
  }
  if (params.has('preload')) {
    ui.modelsPreload = JSON.parse(params.get('preload'));
    log('overriding preload:', ui.modelsPreload);
  }
  if (params.has('warmup')) {
    ui.modelsWarmup = params.get('warmup'); // string
    log('overriding warmup:', ui.modelsWarmup);
  }
  if (params.has('bench')) {
    ui.bench = JSON.parse(params.get('bench'));
    log('overriding bench:', ui.bench);
  }
  if (params.has('play')) {
    ui.autoPlay = true;
    log('overriding autoplay:', true);
  }
  if (params.has('draw')) {
    ui.drawWarmup = JSON.parse(params.get('draw'));
    log('overriding drawWarmup:', ui.drawWarmup);
  }
  if (params.has('async')) {
    userConfig.async = JSON.parse(params.get('async'));
    log('overriding async:', userConfig.async);
  }

  // create instance of human
  human = new Human(userConfig);
  log('human version:', human.version);
  userConfig = { ...human.config, ...userConfig };
  if (typeof tf !== 'undefined') {
    // eslint-disable-next-line no-undef
    log('TensorFlow external version:', tf.version);
    // eslint-disable-next-line no-undef
    human.tf = tf; // use externally loaded version of tfjs
  }
  log('tfjs version:', human.tf.version.tfjs);

  // setup main menu
  await setupMenu();
  await resize();
  document.getElementById('log').innerText = `Human: version ${human.version}`;

  // preload models
  if (ui.modelsPreload && !ui.useWorker) {
    status('loading');
    await human.load(userConfig); // this is not required, just pre-loads all models
    const loaded = Object.keys(human.models).filter((a) => human.models[a]);
    log('demo loaded models:', loaded);
  } else {
    await human.init();
  }

  // warmup models
  if (ui.modelsWarmup && !ui.useWorker) {
    status('initializing');
    if (!userConfig.warmup || userConfig.warmup === 'none') userConfig.warmup = 'full';
    const res = await human.warmup(userConfig); // this is not required, just pre-warms all models for faster initial inference
    if (res && res.canvas && ui.drawWarmup) await drawWarmup(res);
  }

  // ready
  status('human: ready');
  document.getElementById('loader').style.display = 'none';
  document.getElementById('play').style.display = 'block';
  document.getElementById('results').style.display = 'none';

  // init drag & drop
  await dragAndDrop();

  // init segmentation
  await runSegmentation();

  if (params.has('image')) {
    try {
      const image = JSON.parse(params.get('image'));
      log('overriding image:', image);
      ui.samples = [image];
      ui.columns = 1;
    } catch {
      status('cannot parse input image');
      log('cannot parse input image', params.get('image'));
      ui.samples = [];
    }
    if (ui.samples.length > 0) await detectSampleImages();
  }
  if (params.has('images')) {
    log('overriding images list:', JSON.parse(params.get('images')));
    await detectSampleImages();
  }
  if (human.config.debug) log('environment:', human.env);
  if (human.config.backend === 'humangl' && human.config.debug) log('backend:', human.gl);
}
window.onload = main;