modularize model loading

pull/280/head
Vladimir Mandic 2021-06-18 09:16:21 -04:00
parent 53b4939a62
commit 3b75e5e82c
6 changed files with 108 additions and 82 deletions

View File

@ -9,11 +9,11 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
## Changelog
+### **HEAD -> main** 2021/06/18 mandic00@live.com
### **2.0.3** 2021/06/18 mandic00@live.com
-### **origin/main** 2021/06/16 mandic00@live.com
- fix demo paths
- added multithreaded demo

View File

@ -52,7 +52,6 @@ Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) ap
- [**NPM Package**](https://www.npmjs.com/package/@vladmandic/human)
- [**Issues Tracker**](https://github.com/vladmandic/human/issues)
- [**TypeDoc API Specification: Human**](https://vladmandic.github.io/human/typedoc/classes/human.html)
-- [**TypeDoc API Specification: Root**](https://vladmandic.github.io/human/typedoc/)
- [**Change Log**](https://github.com/vladmandic/human/blob/main/CHANGELOG.md)
- [**Current To-do List**](https://github.com/vladmandic/human/blob/main/TODO.md)

View File

@ -75,7 +75,7 @@ const ui = {
  worker: 'index-worker.js',
  maxFPSframes: 10, // keep fps history for how many frames
  modelsPreload: true, // preload human models on startup
- modelsWarmup: true, // warmup human models on startup
+ modelsWarmup: false, // warmup human models on startup
  buffered: true, // should output be buffered between frames
  interpolated: true, // should output be interpolated for smoothness between frames
  iconSize: '48px', // ui icon sizes
@ -272,7 +272,6 @@ async function drawResults(input) {
    if (ui.drawThread) {
      log('stopping buffered refresh');
      cancelAnimationFrame(ui.drawThread);
-     ui.drawThread = null;
    }
  }
}
@ -421,8 +420,13 @@ function webWorker(input, image, canvas, timestamp) {
      status();
      drawResults(input);
    }
+   const videoLive = (input.readyState > 2) && (!input.paused);
+   const cameraLive = input.srcObject && (input.srcObject.getVideoTracks()[0].readyState === 'live') && !input.paused;
+   const live = videoLive || cameraLive;
+   if (live) {
      // eslint-disable-next-line no-use-before-define
      ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
+   }
    });
  }
  // pass image data as arraybuffer to worker by reference to avoid copy
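For reference, here is a minimal sketch of the liveness gate the worker callback now applies before re-arming the detection loop. The variable names mirror the demo code, but `scheduleNextDetect` and its callback parameter are illustrative additions, not part of the demo, and the worker message plumbing is omitted:

```ts
// Sketch only, assuming `input` is an HTMLVideoElement fed by either a video file or a camera stream.
function scheduleNextDetect(input: HTMLVideoElement, onFrame: (now: number) => void): number | null {
  // a file or stream source counts as live once enough data is buffered and playback is not paused
  const videoLive = (input.readyState > 2) && (!input.paused);
  // a camera source counts as live while its first video track still reports state 'live'
  const cameraLive = input.srcObject instanceof MediaStream
    && input.srcObject.getVideoTracks()[0]?.readyState === 'live'
    && !input.paused;
  // only schedule another detection frame while the source is actually producing frames
  return (videoLive || cameraLive) ? requestAnimationFrame(onFrame) : null;
}
```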
@ -437,16 +441,12 @@ function runHumanDetect(input, canvas, timestamp) {
  const live = videoLive || cameraLive;
  if (!live) {
    // stop ui refresh
-   if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
+   // if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
    if (ui.detectThread) cancelAnimationFrame(ui.detectThread);
-   ui.drawThread = null;
-   ui.detectThread = null;
    // if we want to continue and camera not ready, retry in 0.5sec, else just give up
    if (input.paused) log('video paused');
    else if (cameraLive && (input.readyState <= 2)) setTimeout(() => runHumanDetect(input, canvas), 500);
    else log(`video not ready: track state: ${input.srcObject ? input.srcObject.getVideoTracks()[0].readyState : 'unknown'} stream state: ${input.readyState}`);
-   clearTimeout(ui.drawThread);
-   ui.drawThread = null;
    log('frame statistics: process:', ui.framesDetect, 'refresh:', ui.framesDraw);
    log('memory', human.tf.engine().memory());
    return;
@ -581,10 +581,12 @@ async function detectVideo() {
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  canvas.style.display = 'block';
+ cancelAnimationFrame(ui.detectThread);
  if ((video.srcObject !== null) && !video.paused) {
    document.getElementById('btnStartText').innerHTML = 'start video';
    status('paused');
-   video.pause();
+   await video.pause();
+   // if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
  } else {
    const cameraError = await setupCamera();
    if (!cameraError) {
@ -592,7 +594,7 @@ async function detectVideo() {
      for (const m of Object.values(menu)) m.hide();
      document.getElementById('btnStartText').innerHTML = 'pause video';
      await video.play();
-     if (!ui.detectThread) runHumanDetect(video, canvas);
+     runHumanDetect(video, canvas);
    } else {
      status(cameraError);
    }
@ -943,6 +945,7 @@ async function main() {
  // warmup models
  if (ui.modelsWarmup && !ui.useWorker) {
    status('initializing');
+   if (!userConfig.warmup || userConfig.warmup === 'none') userConfig.warmup = 'full';
    const res = await human.warmup(userConfig); // this is not required, just pre-warms all models for faster initial inference
    if (res && res.canvas && ui.drawWarmup) await drawWarmup(res);
  }

View File

@ -69,7 +69,7 @@ export async function predict(input: Tensor, config: Config): Promise<Hand[]> {
  return hands;
}

-export async function load(config: Config): Promise<[unknown, unknown]> {
+export async function load(config: Config): Promise<[GraphModel | null, GraphModel | null]> {
  if (!handDetectorModel || !handPoseModel) {
    // @ts-ignore type mismatch on GraphModel
    [handDetectorModel, handPoseModel] = await Promise.all([
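With the new signature, callers of the hand-pose loader receive a typed tuple instead of `[unknown, unknown]`. A hedged sketch of what that enables, written as if it lived next to `src/human.ts` (the relative import paths follow this diff; `loadHandModels` is an illustrative wrapper, not library API):

```ts
import type { GraphModel } from '@tensorflow/tfjs';
import type { Config } from './config';
import * as handpose from './handpose/handpose';

// destructure the detector and skeleton models with their real types
async function loadHandModels(config: Config): Promise<void> {
  const [detector, skeleton]: [GraphModel | null, GraphModel | null] = await handpose.load(config);
  if (!detector || !skeleton) throw new Error('hand models are disabled or failed to load');
  console.log('hand detector inputs:', detector.inputs.map((t) => t.name));
  console.log('hand skeleton inputs:', skeleton.inputs.map((t) => t.name));
}
```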

View File

@ -8,10 +8,10 @@ import { Result, Gesture } from './result';
import * as sysinfo from './sysinfo';
import * as tf from '../dist/tfjs.esm.js';
import * as backend from './tfjs/backend';
+import * as models from './models';
import * as face from './face';
import * as facemesh from './blazeface/facemesh';
import * as faceres from './faceres/faceres';
-import * as emotion from './emotion/emotion';
import * as posenet from './posenet/posenet';
import * as handpose from './handpose/handpose';
import * as blazepose from './blazepose/blazepose';
@ -19,15 +19,15 @@ import * as efficientpose from './efficientpose/efficientpose';
import * as movenet from './movenet/movenet';
import * as nanodet from './object/nanodet';
import * as centernet from './object/centernet';
-import * as segmentation from './segmentation/segmentation';
import * as gesture from './gesture/gesture';
import * as image from './image/image';
import * as draw from './draw/draw';
import * as persons from './persons';
import * as interpolate from './interpolate';
+import * as segmentation from './segmentation/segmentation';
import * as sample from './sample';
import * as app from '../package.json';
-import { Tensor } from './tfjs/types';
+import { Tensor, GraphModel } from './tfjs/types';

// export types
export type { Config } from './config';
@ -49,11 +49,6 @@ export type Error = { error: string };
 */
export type TensorFlow = typeof tf;

-/** Generic Model object type
- * holds instance of individual models
- */
-type Model = unknown;

/**
 * **Human** library main class
 *
@ -87,8 +82,8 @@ export class Human {
   * - Can be embedded or externally provided
   */
  tf: TensorFlow;
  /** Draw helper classes that can draw detected objects on canvas using specified draw styles
-  * - options: global settings for all draw operations, can be overriden for each draw method, for details see {@link DrawOptions}
+  * - options: {@link DrawOptions} global settings for all draw operations, can be overriden for each draw method
   * - face: draw detected faces
   * - body: draw detected people and body parts
   * - hand: draw detected hands and hand parts
@ -106,20 +101,20 @@ export class Human {
  };
  /** @internal: Currently loaded models */
  models: {
-   face: [Model, Model, Model] | null,
-   posenet: Model | null,
-   blazepose: Model | null,
-   efficientpose: Model | null,
-   movenet: Model | null,
-   handpose: [Model, Model] | null,
-   age: Model | null,
-   gender: Model | null,
-   emotion: Model | null,
-   embedding: Model | null,
-   nanodet: Model | null,
-   centernet: Model | null,
-   faceres: Model | null,
-   segmentation: Model | null,
+   face: [unknown, GraphModel | null, GraphModel | null] | null,
+   posenet: GraphModel | null,
+   blazepose: GraphModel | null,
+   efficientpose: GraphModel | null,
+   movenet: GraphModel | null,
+   handpose: [GraphModel | null, GraphModel | null] | null,
+   age: GraphModel | null,
+   gender: GraphModel | null,
+   emotion: GraphModel | null,
+   embedding: GraphModel | null,
+   nanodet: GraphModel | null,
+   centernet: GraphModel | null,
+   faceres: GraphModel | null,
+   segmentation: GraphModel | null,
  };
  /** Reference face triangualtion array of 468 points, used for triangle references between points */
  faceTriangulation: typeof facemesh.triangulation;
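Replacing the generic `Model = unknown` alias with `GraphModel | null` lets downstream code inspect loaded models without casts. A small illustrative sketch (it assumes the standard TFJS `GraphModel` API such as `inputs`; `describeModel` is hypothetical, not part of the library):

```ts
import type { GraphModel } from '@tensorflow/tfjs';

// hypothetical helper: summarize a loaded model, or report that it is not resident
function describeModel(name: string, model: GraphModel | null): string {
  if (!model) return `${name}: not loaded`;
  const inputs = model.inputs.map((t) => t.name).join(', ');
  return `${name}: loaded, inputs: ${inputs}`;
}
```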
@ -274,47 +269,8 @@ export class Human {
      if (this.config.debug) log('tf flags:', this.tf.ENV.flags);
    }
  }
- if (this.config.async) { // load models concurrently
-   [
-     // @ts-ignore async model loading is not correctly inferred
-     this.models.face,
-     this.models.emotion,
-     // @ts-ignore async model loading is not correctly inferred
-     this.models.handpose,
-     this.models.posenet,
-     this.models.blazepose,
-     this.models.efficientpose,
-     this.models.movenet,
-     this.models.nanodet,
-     this.models.centernet,
-     this.models.faceres,
-     this.models.segmentation,
-   ] = await Promise.all([
-     this.models.face || (this.config.face.enabled ? facemesh.load(this.config) : null),
-     this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
-     this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
-     this.models.posenet || (this.config.body.enabled && this.config.body.modelPath.includes('posenet') ? posenet.load(this.config) : null),
-     this.models.blazepose || (this.config.body.enabled && this.config.body.modelPath.includes('blazepose') ? blazepose.load(this.config) : null),
-     this.models.efficientpose || (this.config.body.enabled && this.config.body.modelPath.includes('efficientpose') ? efficientpose.load(this.config) : null),
-     this.models.movenet || (this.config.body.enabled && this.config.body.modelPath.includes('movenet') ? movenet.load(this.config) : null),
-     this.models.nanodet || (this.config.object.enabled && this.config.object.modelPath.includes('nanodet') ? nanodet.load(this.config) : null),
-     this.models.centernet || (this.config.object.enabled && this.config.object.modelPath.includes('centernet') ? centernet.load(this.config) : null),
-     this.models.faceres || ((this.config.face.enabled && this.config.face.description.enabled) ? faceres.load(this.config) : null),
-     this.models.segmentation || (this.config.segmentation.enabled ? segmentation.load(this.config) : null),
-   ]);
- } else { // load models sequentially
-   if (this.config.face.enabled && !this.models.face) this.models.face = await facemesh.load(this.config);
-   if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
-   if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config);
-   if (this.config.body.enabled && !this.models.posenet && this.config.body.modelPath.includes('posenet')) this.models.posenet = await posenet.load(this.config);
-   if (this.config.body.enabled && !this.models.blazepose && this.config.body.modelPath.includes('blazepose')) this.models.blazepose = await blazepose.load(this.config);
-   if (this.config.body.enabled && !this.models.efficientpose && this.config.body.modelPath.includes('efficientpose')) this.models.efficientpose = await blazepose.load(this.config);
-   if (this.config.body.enabled && !this.models.movenet && this.config.body.modelPath.includes('movenet')) this.models.movenet = await movenet.load(this.config);
-   if (this.config.object.enabled && !this.models.nanodet && this.config.object.modelPath.includes('nanodet')) this.models.nanodet = await nanodet.load(this.config);
-   if (this.config.object.enabled && !this.models.centernet && this.config.object.modelPath.includes('centernet')) this.models.centernet = await centernet.load(this.config);
-   if (this.config.face.enabled && this.config.face.description.enabled && !this.models.faceres) this.models.faceres = await faceres.load(this.config);
-   if (this.config.segmentation.enabled && !this.models.segmentation) this.models.segmentation = await segmentation.load(this.config);
- }
+ await models.load(this); // actually loads models
  if (this.#firstRun) { // print memory stats on first run
    if (this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors');
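After this refactor the class delegates all model loading to the new `src/models.ts` module, while applications keep triggering it through the public `load()` method. A hedged usage sketch (the `modelBasePath` URL is an assumption; point it at wherever the models are actually hosted):

```ts
import { Human } from '@vladmandic/human';

const human = new Human({ modelBasePath: 'https://vladmandic.github.io/human/models' });

async function init(): Promise<void> {
  // load() now funnels into models.load(this), resolving every enabled model up front
  await human.load();
  const loaded = Object.entries(human.models).filter(([, model]) => model).map(([name]) => name);
  console.log('loaded models:', loaded);
}
```

Calling `load()` explicitly remains optional; as the new module's doc comment notes, any missing model is loaded implicitly on its first use.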
@ -695,7 +651,7 @@ export class Human {
    return res;
  }

- /** Warmup metho pre-initializes all models for faster inference
+ /** Warmup method pre-initializes all configured models for faster inference
   * - can take significant time on startup
   * - only used for `webgl` and `humangl` backends
   * @param userConfig?: Config
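The corrected comment describes existing behavior; a minimal hedged example of the call it documents (the `warmup: 'full'` value mirrors what the demo now forces, and the result fields follow the demo's own checks):

```ts
import { Human } from '@vladmandic/human';

const human = new Human();

async function prewarm(): Promise<void> {
  // run each enabled model once so the first real detect() call avoids the compile/upload cost;
  // primarily useful on the webgl/humangl backends
  const res = await human.warmup({ warmup: 'full' });
  if (res && 'canvas' in res && res.canvas) console.log('warmup canvas:', res.canvas.width, 'x', res.canvas.height);
}
```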

src/models.ts (new file, 68 additions)
View File

@ -0,0 +1,68 @@
import * as facemesh from './blazeface/facemesh';
import * as faceres from './faceres/faceres';
import * as emotion from './emotion/emotion';
import * as posenet from './posenet/posenet';
import * as handpose from './handpose/handpose';
import * as blazepose from './blazepose/blazepose';
import * as efficientpose from './efficientpose/efficientpose';
import * as movenet from './movenet/movenet';
import * as nanodet from './object/nanodet';
import * as centernet from './object/centernet';
import * as segmentation from './segmentation/segmentation';
/** Load method preloads all configured models on-demand
 * - Not explicitly required, as any required model is loaded implicitly on its first use
 * @param instance: Human instance whose enabled models should be loaded
 */
export async function load(instance) {
if (instance.config.async) { // load models concurrently
[
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.face,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.emotion,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.handpose,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.posenet,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.blazepose,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.efficientpose,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.movenet,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.nanodet,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.centernet,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.faceres,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.segmentation,
] = await Promise.all([
instance.models.face || (instance.config.face.enabled ? facemesh.load(instance.config) : null),
instance.models.emotion || ((instance.config.face.enabled && instance.config.face.emotion.enabled) ? emotion.load(instance.config) : null),
instance.models.handpose || (instance.config.hand.enabled ? handpose.load(instance.config) : null),
instance.models.posenet || (instance.config.body.enabled && instance.config.body.modelPath.includes('posenet') ? posenet.load(instance.config) : null),
instance.models.blazepose || (instance.config.body.enabled && instance.config.body.modelPath.includes('blazepose') ? blazepose.load(instance.config) : null),
instance.models.efficientpose || (instance.config.body.enabled && instance.config.body.modelPath.includes('efficientpose') ? efficientpose.load(instance.config) : null),
instance.models.movenet || (instance.config.body.enabled && instance.config.body.modelPath.includes('movenet') ? movenet.load(instance.config) : null),
instance.models.nanodet || (instance.config.object.enabled && instance.config.object.modelPath.includes('nanodet') ? nanodet.load(instance.config) : null),
instance.models.centernet || (instance.config.object.enabled && instance.config.object.modelPath.includes('centernet') ? centernet.load(instance.config) : null),
instance.models.faceres || ((instance.config.face.enabled && instance.config.face.description.enabled) ? faceres.load(instance.config) : null),
instance.models.segmentation || (instance.config.segmentation.enabled ? segmentation.load(instance.config) : null),
]);
} else { // load models sequentially
if (instance.config.face.enabled && !instance.models.face) instance.models.face = await facemesh.load(instance.config);
if (instance.config.face.enabled && instance.config.face.emotion.enabled && !instance.models.emotion) instance.models.emotion = await emotion.load(instance.config);
if (instance.config.hand.enabled && !instance.models.handpose) instance.models.handpose = await handpose.load(instance.config);
if (instance.config.body.enabled && !instance.models.posenet && instance.config.body.modelPath.includes('posenet')) instance.models.posenet = await posenet.load(instance.config);
if (instance.config.body.enabled && !instance.models.blazepose && instance.config.body.modelPath.includes('blazepose')) instance.models.blazepose = await blazepose.load(instance.config);
if (instance.config.body.enabled && !instance.models.efficientpose && instance.config.body.modelPath.includes('efficientpose')) instance.models.efficientpose = await efficientpose.load(instance.config);
if (instance.config.body.enabled && !instance.models.movenet && instance.config.body.modelPath.includes('movenet')) instance.models.movenet = await movenet.load(instance.config);
if (instance.config.object.enabled && !instance.models.nanodet && instance.config.object.modelPath.includes('nanodet')) instance.models.nanodet = await nanodet.load(instance.config);
if (instance.config.object.enabled && !instance.models.centernet && instance.config.object.modelPath.includes('centernet')) instance.models.centernet = await centernet.load(instance.config);
if (instance.config.face.enabled && instance.config.face.description.enabled && !instance.models.faceres) instance.models.faceres = await faceres.load(instance.config);
if (instance.config.segmentation.enabled && !instance.models.segmentation) instance.models.segmentation = await segmentation.load(instance.config);
}
}
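Both code paths in the new module are selected by the existing `config.async` flag. A hedged sketch of toggling it (the rationale for sequential loading is a general assumption, not a claim from this commit):

```ts
import { Human } from '@vladmandic/human';

// async: true loads every enabled model through a single Promise.all;
// async: false walks the same list one model at a time, trading startup latency for lower peak load
const human = new Human({ async: false });

async function preload(): Promise<void> {
  await human.load(); // delegates to the load() exported from src/models.ts
}
```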