// human/src/human.ts
/**
* Human main module
*/
import { log, now, mergeDeep } from './helpers';
import { Config, defaults } from './config';
import type { Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult, PersonResult } from './result';
import * as tf from '../dist/tfjs.esm.js';
import * as models from './models';
import * as face from './face';
import * as facemesh from './blazeface/facemesh';
import * as faceres from './faceres/faceres';
import * as posenet from './posenet/posenet';
import * as handpose from './handpose/handpose';
import * as blazepose from './blazepose/blazepose';
import * as efficientpose from './efficientpose/efficientpose';
import * as movenet from './movenet/movenet';
import * as nanodet from './object/nanodet';
import * as centernet from './object/centernet';
import * as segmentation from './segmentation/segmentation';
import * as gesture from './gesture/gesture';
import * as image from './image/image';
import * as draw from './draw';
import * as persons from './persons';
import * as interpolate from './interpolate';
import * as env from './env';
import * as backend from './tfjs/backend';
import * as humangl from './tfjs/humangl';
import * as app from '../package.json';
import * as warmups from './warmup';
import type { Tensor, GraphModel } from './tfjs/types';
import type { DrawOptions } from './draw';
2021-04-13 17:05:52 +02:00
// export types
2021-09-12 05:54:35 +02:00
export * from './config';
export * from './result';
2021-09-16 00:58:54 +02:00
export type { DrawOptions } from './draw';
2021-09-12 18:42:17 +02:00
export { env } from './env';
2021-04-13 17:05:52 +02:00
2021-05-31 00:45:39 +02:00
/** Defines all possible input types for **Human** detection
* @typedef Input Type
2021-05-31 00:45:39 +02:00
*/
2021-09-16 00:58:54 +02:00
export type Input = Tensor | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
2021-04-06 17:38:01 +02:00
2021-09-12 06:30:11 +02:00
/** Events dispatched by `human.events`
* - `create`: triggered when Human object is instantiated
* - `load`: triggered when models are loaded (explicitly or on-demand)
* - `image`: triggered when input image is this.processed
* - `result`: triggered when detection is complete
* - `warmup`: triggered when warmup is complete
*/
export type Events = 'create' | 'load' | 'image' | 'result' | 'warmup';
2021-05-31 00:45:39 +02:00
/** Error message
* @typedef Error Type
2021-05-31 00:45:39 +02:00
*/
2021-03-18 01:16:40 +01:00
export type Error = { error: string };
2021-04-13 17:05:52 +02:00
2021-06-07 01:00:34 +02:00
/** Instance of TensorFlow/JS
* @external
*/
2021-03-17 23:23:19 +01:00
export type TensorFlow = typeof tf;
2021-04-13 17:05:52 +02:00
2021-03-17 23:23:19 +01:00
/**
2021-03-17 23:33:12 +01:00
* **Human** library main class
2021-03-17 23:23:19 +01:00
*
* All methods and properties are available only as members of Human class
*
2021-03-17 23:33:12 +01:00
* - Configuration object definition: {@link Config}
* - Results object definition: {@link Result}
* - Possible inputs: {@link Input}
2021-05-31 00:45:39 +02:00
*
* @param userConfig: {@link Config}
2021-03-17 23:23:19 +01:00
*/
2021-03-14 04:31:09 +01:00
export class Human {
/** Current version of Human library in *semver* format */
2021-09-05 22:42:11 +02:00
version: string;
2021-09-16 00:58:54 +02:00
2021-04-13 17:05:52 +02:00
/** Current configuration
* - Details: {@link Config}
*/
2021-03-17 23:23:19 +01:00
config: Config;
2021-09-16 00:58:54 +02:00
2021-05-30 00:29:57 +02:00
/** Last known result of detect run
* - Can be accessed anytime after initial detection
*/
result: Result;
2021-09-16 00:58:54 +02:00
2021-04-13 17:05:52 +02:00
/** Current state of Human library
* - Can be polled to determine operations that are currently executed
* - Progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle'
2021-04-13 17:05:52 +02:00
*/
2021-03-18 01:16:40 +01:00
state: string;
2021-09-16 00:58:54 +02:00
2021-09-11 22:00:16 +02:00
/** currenty processed image tensor and canvas */
process: { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement | null };
2021-09-16 00:58:54 +02:00
2021-05-31 00:45:39 +02:00
/** @internal: Instance of TensorFlow/JS used by Human
2021-04-13 17:05:52 +02:00
* - Can be embedded or externally provided
*/
2021-03-17 23:23:19 +01:00
tf: TensorFlow;
2021-09-16 00:58:54 +02:00
2021-09-12 18:42:17 +02:00
/**
* Object containing environment information used for diagnostics
*/
env: env.Env;
2021-09-16 00:58:54 +02:00
2021-06-18 15:16:21 +02:00
/** Draw helper classes that can draw detected objects on canvas using specified draw
* - options: {@link DrawOptions} global settings for all draw operations, can be overriden for each draw method
2021-04-13 17:05:52 +02:00
* - face: draw detected faces
* - body: draw detected people and body parts
* - hand: draw detected hands and hand parts
2021-09-11 22:00:16 +02:00
* - canvas: draw this.processed canvas which is a this.processed copy of the input
2021-04-13 17:05:52 +02:00
* - all: meta-function that performs: canvas, face, body, hand
*/
2021-09-16 00:58:54 +02:00
// draw: typeof draw;
draw: { canvas, face, body, hand, gesture, object, person, all, options: DrawOptions };
2021-05-31 00:45:39 +02:00
/** @internal: Currently loaded models */
models: {
2021-06-18 15:16:21 +02:00
face: [unknown, GraphModel | null, GraphModel | null] | null,
posenet: GraphModel | null,
blazepose: GraphModel | null,
efficientpose: GraphModel | null,
movenet: GraphModel | null,
handpose: [GraphModel | null, GraphModel | null] | null,
age: GraphModel | null,
gender: GraphModel | null,
emotion: GraphModel | null,
embedding: GraphModel | null,
nanodet: GraphModel | null,
centernet: GraphModel | null,
faceres: GraphModel | null,
segmentation: GraphModel | null,
};
2021-09-16 00:58:54 +02:00
2021-09-11 22:00:16 +02:00
/** Container for events dispatched by Human
*
* Possible events:
* - `create`: triggered when Human object is instantiated
* - `load`: triggered when models are loaded (explicitly or on-demand)
* - `image`: triggered when input image is this.processed
* - `result`: triggered when detection is complete
* - `warmup`: triggered when warmup is complete
*/
events: EventTarget;
/** Reference face triangualtion array of 468 points, used for triangle references between points */
2021-03-29 21:59:16 +02:00
faceTriangulation: typeof facemesh.triangulation;
/** Refernce UV map of 468 values, used for 3D mapping of the face mesh */
2021-03-29 21:59:16 +02:00
faceUVMap: typeof facemesh.uvmap;
2021-04-13 17:05:52 +02:00
/** Performance object that contains values for all recently performed operations */
2021-06-03 15:41:53 +02:00
performance: Record<string, number>; // perf members are dynamically defined as needed
#numTensors: number;
2021-03-18 01:16:40 +01:00
#analyzeMemoryLeaks: boolean;
#checkSanity: boolean;
2021-09-17 17:23:00 +02:00
/** WebGL debug info */
gl: Record<string, unknown>;
// definition end
2021-02-08 17:39:09 +01:00
2021-04-13 17:05:52 +02:00
/**
* Creates instance of Human library that is futher used for all operations
2021-05-31 00:45:39 +02:00
* @param userConfig: {@link Config}
2021-04-13 17:05:52 +02:00
*/
2021-09-11 22:11:00 +02:00
constructor(userConfig?: Partial<Config>) {
2021-09-12 18:42:17 +02:00
env.get();
this.env = env.env;
defaults.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf.version_core}/dist/`;
defaults.modelBasePath = this.env.browser ? '../models/' : 'file://models/';
defaults.backend = this.env.browser ? 'humangl' : 'tensorflow';
2021-09-05 22:42:11 +02:00
this.version = app.version; // expose version property on instance of class
Object.defineProperty(this, 'version', { value: app.version }); // expose version property directly on class itself
this.config = mergeDeep(defaults, userConfig || {});
this.tf = tf;
this.state = 'idle';
this.#numTensors = 0;
this.#analyzeMemoryLeaks = false;
this.#checkSanity = false;
this.performance = { backend: 0, load: 0, image: 0, frames: 0, cached: 0, changed: 0, total: 0, draw: 0 };
2021-09-11 22:00:16 +02:00
this.events = new EventTarget();
// object that contains all initialized models
this.models = {
face: null, // array of models
2021-09-16 00:58:54 +02:00
handpose: null, // array of models
posenet: null,
2021-03-04 16:33:08 +01:00
blazepose: null,
2021-03-26 23:50:19 +01:00
efficientpose: null,
movenet: null,
age: null,
gender: null,
emotion: null,
embedding: null,
nanodet: null,
centernet: null,
2021-03-21 19:18:51 +01:00
faceres: null,
2021-06-04 19:51:01 +02:00
segmentation: null,
};
2021-09-16 00:58:54 +02:00
// reexport draw methods
this.draw = {
options: draw.options as DrawOptions,
canvas: (input: HTMLCanvasElement | OffscreenCanvas | HTMLImageElement | HTMLMediaElement | HTMLVideoElement, output: HTMLCanvasElement) => draw.canvas(input, output),
face: (output: HTMLCanvasElement | OffscreenCanvas, result: FaceResult[], options?: Partial<DrawOptions>) => draw.face(output, result, options),
body: (output: HTMLCanvasElement | OffscreenCanvas, result: BodyResult[], options?: Partial<DrawOptions>) => draw.body(output, result, options),
hand: (output: HTMLCanvasElement | OffscreenCanvas, result: HandResult[], options?: Partial<DrawOptions>) => draw.hand(output, result, options),
gesture: (output: HTMLCanvasElement | OffscreenCanvas, result: GestureResult[], options?: Partial<DrawOptions>) => draw.gesture(output, result, options),
object: (output: HTMLCanvasElement | OffscreenCanvas, result: ObjectResult[], options?: Partial<DrawOptions>) => draw.object(output, result, options),
person: (output: HTMLCanvasElement | OffscreenCanvas, result: PersonResult[], options?: Partial<DrawOptions>) => draw.person(output, result, options),
all: (output: HTMLCanvasElement | OffscreenCanvas, result: Result, options?: Partial<DrawOptions>) => draw.all(output, result, options),
};
2021-08-17 14:51:17 +02:00
this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [] };
2021-09-11 22:00:16 +02:00
// export access to image this.processing
2021-04-14 18:53:00 +02:00
// @ts-ignore eslint-typescript cannot correctly infer type in anonymous function
2021-09-11 22:00:16 +02:00
this.process = { tensor: null, canvas: null };
// export raw access to underlying models
2021-03-29 21:59:16 +02:00
this.faceTriangulation = facemesh.triangulation;
this.faceUVMap = facemesh.uvmap;
2021-09-17 17:23:00 +02:00
// set gl info
this.gl = humangl.config;
2021-03-06 16:38:04 +01:00
// include platform info
this.emit('create');
2020-10-18 18:12:09 +02:00
}
// helper function: measure tensor leak
2021-03-14 04:31:09 +01:00
/** @hidden */
2021-08-17 14:51:17 +02:00
analyze = (...msg: string[]) => {
if (!this.#analyzeMemoryLeaks) return;
const currentTensors = this.tf.engine().state.numTensors;
const previousTensors = this.#numTensors;
this.#numTensors = currentTensors;
const leaked = currentTensors - previousTensors;
2020-12-08 15:00:44 +01:00
if (leaked !== 0) log(...msg, leaked);
2020-10-18 18:12:09 +02:00
}
2020-10-17 13:15:23 +02:00
2020-11-04 16:18:22 +01:00
// quick sanity check on inputs
2021-03-14 04:31:09 +01:00
/** @hidden */
2021-08-17 14:51:17 +02:00
#sanity = (input: Input): null | string => {
if (!this.#checkSanity) return null;
2020-11-03 16:55:33 +01:00
if (!input) return 'input is not defined';
2021-09-12 19:17:33 +02:00
if (this.env.node && !(input instanceof tf.Tensor)) return 'input must be a tensor';
2020-11-03 16:55:33 +01:00
try {
2021-02-19 14:35:41 +01:00
this.tf.getBackend();
2020-11-03 16:55:33 +01:00
} catch {
return 'backend not loaded';
}
return null;
}
/** Process input as return canvas and tensor
*
* @param input: {@link Input}
* @returns { tensor, canvas }
*/
image = (input: Input) => image.process(input, this.config);
2021-04-13 17:05:52 +02:00
/** Simmilarity method calculates simmilarity between two provided face descriptors (face embeddings)
* - Calculation is based on normalized Minkowski distance between
*
2021-05-31 00:45:39 +02:00
* @param embedding1: face descriptor as array of numbers
* @param embedding2: face descriptor as array of numbers
* @returns similarity: number
2021-04-13 17:05:52 +02:00
*/
2021-04-25 00:43:59 +02:00
// eslint-disable-next-line class-methods-use-this
2021-03-21 19:18:51 +01:00
similarity(embedding1: Array<number>, embedding2: Array<number>): number {
2021-04-25 00:43:59 +02:00
return faceres.similarity(embedding1, embedding2);
2020-11-13 22:13:35 +01:00
}
/**
2021-09-11 22:00:16 +02:00
* Segmentation method takes any input and returns this.processed canvas with body segmentation
* Optional parameter background is used to fill the background with specific input
2021-09-11 22:00:16 +02:00
* Segmentation is not triggered as part of detect this.process
*
* @param input: {@link Input}
* @param background?: {@link Input}
* @returns Canvas
*/
2021-09-15 19:59:18 +02:00
async segmentation(input: Input, background?: Input) {
2021-09-13 19:28:35 +02:00
return input ? segmentation.process(input, background, this.config) : null;
}
2021-09-11 22:00:16 +02:00
/** Enhance method performs additional enhacements to face image previously detected for futher this.processing
2021-05-31 00:45:39 +02:00
* @param input: Tensor as provided in human.result.face[n].tensor
2021-04-13 17:05:52 +02:00
* @returns Tensor
*/
// eslint-disable-next-line class-methods-use-this
2021-03-14 04:31:09 +01:00
enhance(input: Tensor): Tensor | null {
2021-03-21 19:18:51 +01:00
return faceres.enhance(input);
}
2021-05-31 00:45:39 +02:00
/** Math method find best match between provided face descriptor and predefined database of known descriptors
2021-04-13 17:05:52 +02:00
* @param faceEmbedding: face descriptor previsouly calculated on any face
* @param db: array of mapping of face descriptors to known values
* @param threshold: minimum score for matching to be considered in the result
* @returns best match
*/
// eslint-disable-next-line class-methods-use-this
2021-03-21 19:18:51 +01:00
match(faceEmbedding: Array<number>, db: Array<{ name: string, source: string, embedding: number[] }>, threshold = 0): { name: string, source: string, similarity: number, embedding: number[] } {
return faceres.match(faceEmbedding, db, threshold);
2021-03-12 18:54:08 +01:00
}
2021-04-13 17:05:52 +02:00
/** Load method preloads all configured models on-demand
* - Not explicitly required as any required model is load implicitly on it's first run
* @param userConfig?: {@link Config}
2021-04-13 17:05:52 +02:00
*/
2021-09-11 22:11:00 +02:00
async load(userConfig?: Partial<Config>) {
2020-11-06 17:39:39 +01:00
this.state = 'load';
const timeStamp = now();
2021-09-11 22:00:16 +02:00
const count = Object.values(this.models).filter((model) => model).length;
2021-06-03 15:41:53 +02:00
if (userConfig) this.config = mergeDeep(this.config, userConfig) as Config;
2020-11-03 15:34:36 +01:00
2021-09-17 17:23:00 +02:00
if (env.env.initial) { // print version info on first run and check for correct backend setup
2021-09-05 22:42:11 +02:00
if (this.config.debug) log(`version: ${this.version}`);
2021-03-06 16:38:04 +01:00
if (this.config.debug) log(`tfjs version: ${this.tf.version_core}`);
await backend.check(this);
await tf.ready();
2021-09-12 19:17:33 +02:00
if (this.env.browser) {
2021-03-02 17:27:42 +01:00
if (this.config.debug) log('configuration:', this.config);
if (this.config.debug) log('tf flags:', this.tf.ENV.flags);
}
2020-11-03 15:34:36 +01:00
}
2021-06-18 15:16:21 +02:00
await models.load(this); // actually loads models
2021-09-17 17:23:00 +02:00
if (env.env.initial && this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors'); // print memory stats on first run
env.env.initial = false;
2021-01-12 15:55:08 +01:00
2021-09-11 22:00:16 +02:00
const loaded = Object.values(this.models).filter((model) => model).length;
if (loaded !== count) { // number of loaded models changed
await models.validate(this); // validate kernel ops used by model against current backend
this.emit('load');
}
2020-11-06 17:39:39 +01:00
const current = Math.trunc(now() - timeStamp);
if (current > (this.performance.load as number || 0)) this.performance.load = current;
}
2020-10-17 17:38:24 +02:00
2021-09-11 22:00:16 +02:00
// emit event
/** @hidden */
emit = (event: string) => this.events?.dispatchEvent(new Event(event));
2020-10-30 15:23:49 +01:00
/**
* Runs interpolation using last known result and returns smoothened result
* Interpolation is based on time since last known result so can be called independently
*
* @param result?: {@link Result} optional use specific result set to run interpolation on
* @returns result: {@link Result}
*/
2021-09-15 19:59:18 +02:00
next(result: Result = this.result) {
return interpolate.calc(result) as Result;
}
/** Warmup method pre-initializes all configured models for faster inference
* - can take significant time on startup
* - only used for `webgl` and `humangl` backends
* @param userConfig?: {@link Config}
2021-09-15 19:59:18 +02:00
* @returns result: {@link Result}
*/
2021-09-15 19:59:18 +02:00
async warmup(userConfig?: Partial<Config>): Promise<Result | { error }> {
return warmups.warmup(this, userConfig) as Promise<Result | { error }>;
}
2021-04-13 17:05:52 +02:00
/** Main detection method
* - Analyze configuration: {@link Config}
2021-09-11 22:00:16 +02:00
* - Pre-this.process input: {@link Input}
2021-04-13 17:05:52 +02:00
* - Run inference for all configured models
* - Process and return result: {@link Result}
*
* @param input: {@link Input}
* @param userConfig?: {@link Config}
* @returns result: {@link Result}
2021-04-13 17:05:52 +02:00
*/
2021-09-11 22:11:00 +02:00
async detect(input: Input, userConfig?: Partial<Config>): Promise<Result | Error> {
2020-11-04 16:18:22 +01:00
// detection happens inside a promise
return new Promise(async (resolve) => {
2020-11-13 22:13:35 +01:00
this.state = 'config';
let timeStamp;
2021-06-05 23:51:46 +02:00
let elapsedTime;
2020-11-13 22:13:35 +01:00
// update configuration
2021-06-03 15:41:53 +02:00
this.config = mergeDeep(this.config, userConfig) as Config;
2020-11-13 22:13:35 +01:00
// sanity checks
this.state = 'check';
const error = this.#sanity(input);
2020-11-13 22:13:35 +01:00
if (error) {
2020-12-08 15:00:44 +01:00
log(error, input);
2020-11-13 22:13:35 +01:00
resolve({ error });
}
const timeStart = now();
// configure backend if needed
await backend.check(this);
// load models if enabled
await this.load();
2020-10-16 16:12:12 +02:00
timeStamp = now();
2021-09-13 19:28:35 +02:00
let img = image.process(input, this.config);
this.process = img;
2021-06-05 23:51:46 +02:00
this.performance.image = Math.trunc(now() - timeStamp);
this.analyze('Get Image:');
2021-09-11 22:00:16 +02:00
// run segmentation prethis.processing
2021-09-13 19:28:35 +02:00
if (this.config.segmentation.enabled && this.process && img.tensor && img.canvas) {
2021-06-05 23:51:46 +02:00
this.analyze('Start Segmentation:');
this.state = 'run:segmentation';
timeStamp = now();
2021-09-13 19:28:35 +02:00
await segmentation.predict(img);
2021-06-05 23:51:46 +02:00
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.performance.segmentation = elapsedTime;
2021-09-13 19:28:35 +02:00
if (img.canvas) {
2021-06-05 23:51:46 +02:00
// replace input
2021-09-13 19:28:35 +02:00
tf.dispose(img.tensor);
img = image.process(img.canvas, this.config);
2021-06-05 23:51:46 +02:00
}
this.analyze('End Segmentation:');
}
2021-09-13 19:28:35 +02:00
if (!img.tensor) {
2020-12-08 15:00:44 +01:00
log('could not convert input to tensor');
2020-11-20 13:52:50 +01:00
resolve({ error: 'could not convert input to tensor' });
return;
}
this.emit('image');
timeStamp = now();
2021-09-13 19:28:35 +02:00
this.config.skipFrame = await image.skip(this.config, img.tensor);
if (!this.performance.frames) this.performance.frames = 0;
if (!this.performance.cached) this.performance.cached = 0;
(this.performance.frames as number)++;
if (this.config.skipFrame) this.performance.cached++;
this.performance.changed = Math.trunc(now() - timeStamp);
this.analyze('Check Changed:');
2021-03-06 23:22:47 +01:00
// prepare where to store model results
2021-05-22 20:53:51 +02:00
// keep them with weak typing as it can be promise or not
2021-09-12 05:54:35 +02:00
let faceRes: FaceResult[] | Promise<FaceResult[]> | never[] = [];
let bodyRes: BodyResult[] | Promise<BodyResult[]> | never[] = [];
let handRes: HandResult[] | Promise<HandResult[]> | never[] = [];
let objectRes: ObjectResult[] | Promise<ObjectResult[]> | never[] = [];
2021-03-06 23:22:47 +01:00
2020-11-06 17:39:39 +01:00
// run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
if (this.config.async) {
2021-09-13 19:28:35 +02:00
faceRes = this.config.face.enabled ? face.detectFace(this, img.tensor) : [];
if (this.performance.face) delete this.performance.face;
2020-11-06 17:39:39 +01:00
} else {
this.state = 'run:face';
2020-10-16 16:12:12 +02:00
timeStamp = now();
2021-09-13 19:28:35 +02:00
faceRes = this.config.face.enabled ? await face.detectFace(this, img.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.performance.face = elapsedTime;
2020-10-14 02:52:30 +02:00
}
2020-10-13 04:01:35 +02:00
// run body: can be posenet, blazepose, efficientpose, movenet
2021-03-21 12:49:55 +01:00
this.analyze('Start Body:');
2020-11-04 07:11:24 +01:00
if (this.config.async) {
2021-09-13 19:28:35 +02:00
if (this.config.body.modelPath?.includes('posenet')) bodyRes = this.config.body.enabled ? posenet.predict(img.tensor, this.config) : [];
else if (this.config.body.modelPath?.includes('blazepose')) bodyRes = this.config.body.enabled ? blazepose.predict(img.tensor, this.config) : [];
else if (this.config.body.modelPath?.includes('efficientpose')) bodyRes = this.config.body.enabled ? efficientpose.predict(img.tensor, this.config) : [];
else if (this.config.body.modelPath?.includes('movenet')) bodyRes = this.config.body.enabled ? movenet.predict(img.tensor, this.config) : [];
if (this.performance.body) delete this.performance.body;
2020-11-04 07:11:24 +01:00
} else {
this.state = 'run:body';
timeStamp = now();
2021-09-13 19:28:35 +02:00
if (this.config.body.modelPath?.includes('posenet')) bodyRes = this.config.body.enabled ? await posenet.predict(img.tensor, this.config) : [];
else if (this.config.body.modelPath?.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(img.tensor, this.config) : [];
else if (this.config.body.modelPath?.includes('efficientpose')) bodyRes = this.config.body.enabled ? await efficientpose.predict(img.tensor, this.config) : [];
else if (this.config.body.modelPath?.includes('movenet')) bodyRes = this.config.body.enabled ? await movenet.predict(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.performance.body = elapsedTime;
2020-11-04 07:11:24 +01:00
}
2021-03-21 12:49:55 +01:00
this.analyze('End Body:');
2020-11-04 07:11:24 +01:00
// run handpose
2021-03-21 12:49:55 +01:00
this.analyze('Start Hand:');
2020-11-04 07:11:24 +01:00
if (this.config.async) {
2021-09-13 19:28:35 +02:00
handRes = this.config.hand.enabled ? handpose.predict(img.tensor, this.config) : [];
if (this.performance.hand) delete this.performance.hand;
2020-11-04 07:11:24 +01:00
} else {
this.state = 'run:hand';
timeStamp = now();
2021-09-13 19:28:35 +02:00
handRes = this.config.hand.enabled ? await handpose.predict(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.performance.hand = elapsedTime;
2020-11-04 07:11:24 +01:00
}
2021-03-21 12:49:55 +01:00
this.analyze('End Hand:');
2020-11-04 07:11:24 +01:00
// run nanodet
2021-03-21 12:49:55 +01:00
this.analyze('Start Object:');
if (this.config.async) {
2021-09-13 19:28:35 +02:00
if (this.config.object.modelPath?.includes('nanodet')) objectRes = this.config.object.enabled ? nanodet.predict(img.tensor, this.config) : [];
else if (this.config.object.modelPath?.includes('centernet')) objectRes = this.config.object.enabled ? centernet.predict(img.tensor, this.config) : [];
if (this.performance.object) delete this.performance.object;
} else {
this.state = 'run:object';
timeStamp = now();
2021-09-13 19:28:35 +02:00
if (this.config.object.modelPath?.includes('nanodet')) objectRes = this.config.object.enabled ? await nanodet.predict(img.tensor, this.config) : [];
else if (this.config.object.modelPath?.includes('centernet')) objectRes = this.config.object.enabled ? await centernet.predict(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.performance.object = elapsedTime;
}
2021-03-21 12:49:55 +01:00
this.analyze('End Object:');
2020-11-06 17:39:39 +01:00
// if async wait for results
2021-05-25 14:58:20 +02:00
if (this.config.async) [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
2020-10-14 17:43:33 +02:00
// run gesture analysis last
2021-09-12 05:54:35 +02:00
let gestureRes: GestureResult[] = [];
2020-11-04 16:18:22 +01:00
if (this.config.gesture.enabled) {
timeStamp = now();
2021-03-04 16:33:08 +01:00
gestureRes = [...gesture.face(faceRes), ...gesture.body(bodyRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
if (!this.config.async) this.performance.gesture = Math.trunc(now() - timeStamp);
else if (this.performance.gesture) delete this.performance.gesture;
2020-11-04 16:18:22 +01:00
}
this.performance.total = Math.trunc(now() - timeStart);
2020-11-06 17:39:39 +01:00
this.state = 'idle';
2021-09-11 22:00:16 +02:00
const shape = this.process?.tensor?.shape || [];
2021-05-30 00:29:57 +02:00
this.result = {
2021-09-12 05:54:35 +02:00
face: faceRes as FaceResult[],
body: bodyRes as BodyResult[],
hand: handRes as HandResult[],
2021-03-17 19:35:11 +01:00
gesture: gestureRes,
2021-09-12 05:54:35 +02:00
object: objectRes as ObjectResult[],
performance: this.performance,
2021-09-11 22:00:16 +02:00
canvas: this.process.canvas,
2021-05-22 18:41:29 +02:00
timestamp: Date.now(),
2021-09-12 05:54:35 +02:00
get persons() { return persons.join(faceRes as FaceResult[], bodyRes as BodyResult[], handRes as HandResult[], gestureRes, shape); },
2021-03-17 19:35:11 +01:00
};
2021-05-25 14:58:20 +02:00
// finally dispose input tensor
2021-09-13 19:28:35 +02:00
tf.dispose(img.tensor);
2021-05-25 14:58:20 +02:00
2021-03-17 19:35:11 +01:00
// log('Result:', result);
this.emit('detect');
2021-05-30 00:29:57 +02:00
resolve(this.result);
});
}
2020-10-12 01:22:43 +02:00
}
2021-03-17 23:23:19 +01:00
/**
* Class Human is also available as default export
*/
export { Human as default };