/**
* Human main module
*/
import { log, now, mergeDeep } from './helpers';
import { Config, defaults } from './config';
import { Result, Gesture } from './result';
import * as sysinfo from './sysinfo';
import * as tf from '../dist/tfjs.esm.js';
import * as backend from './tfjs/backend';
import * as face from './face';
import * as facemesh from './blazeface/facemesh';
import * as faceres from './faceres/faceres';
import * as emotion from './emotion/emotion';
import * as posenet from './posenet/posenet';
import * as handpose from './handpose/handpose';
import * as blazepose from './blazepose/blazepose';
import * as nanodet from './object/nanodet';
import * as centernet from './object/centernet';
import * as gesture from './gesture/gesture';
import * as image from './image/image';
import * as draw from './draw/draw';
import * as persons from './persons';
import * as sample from './sample';
import * as app from '../package.json';
import { Tensor } from './tfjs/types';
// export types
export type { Config } from './config';
export type { Result, Face, Hand, Body, Item, Gesture } from './result';
export type { DrawOptions } from './draw/draw';
/** Defines all possible input types for **Human** detection */
export type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
/** Error message */
export type Error = { error: string };
/** Instance of TensorFlow/JS */
export type TensorFlow = typeof tf;
/** Generic Model object type, holds instance of individual models */
type Model = unknown;
/**
* **Human** library main class
*
* All methods and properties are available only as members of Human class
*
* - Configuration object definition: {@link Config}
* - Results object definition: {@link Result}
* - Possible inputs: {@link Input}
*/
export class Human {
/** Current version of Human library in semver format */
version: string;
/** Current configuration
* - Details: {@link Config}
*/
config: Config;
/** Current state of Human library
 * - Can be polled to determine operations that are currently being executed
*/
state: string;
/** Internal: Instance of current image being processed */
image: { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement | null };
/** Internal: Instance of TensorFlow/JS used by Human
* - Can be embedded or externally provided
*/
tf: TensorFlow;
/** Draw helper classes that can draw detected objects on canvas using specified draw styles
 * - options: global settings for all draw operations, can be overridden for each draw method, for details see {@link DrawOptions}
 * - face: draw detected faces
 * - body: draw detected people and body parts
 * - hand: draw detected hands and hand parts
 * - canvas: draw the processed canvas, which is a processed copy of the input
* - all: meta-function that performs: canvas, face, body, hand
*/
draw: {
options: draw.DrawOptions,
gesture: typeof draw.gesture,
face: typeof draw.face,
body: typeof draw.body,
hand: typeof draw.hand,
canvas: typeof draw.canvas,
all: typeof draw.all,
};
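// Example (illustrative sketch, not part of the library): drawing detection results onto an output
// canvas; assumes draw.all accepts a target canvas and a Result object (its signature lives in
// ./draw/draw and is not shown here) and that `outputCanvas` is an existing HTMLCanvasElement:
//   const result = await human.detect(inputVideo);
//   human.draw.options.color = 'lightblue'; // assumed DrawOptions field, shown for illustration only
//   await human.draw.all(outputCanvas, result);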
/** Internal: Currently loaded models */
models: {
face: [Model, Model, Model] | null,
posenet: Model | null,
blazepose: Model | null,
efficientpose: Model | null,
handpose: [Model, Model] | null,
iris: Model | null,
age: Model | null,
gender: Model | null,
emotion: Model | null,
embedding: Model | null,
nanodet: Model | null,
centernet: Model | null,
faceres: Model | null,
};
/** Internal: Currently loaded classes */
classes: {
facemesh: typeof facemesh;
emotion: typeof emotion;
body: typeof posenet | typeof blazepose;
hand: typeof handpose;
nanodet: typeof nanodet;
centernet: typeof centernet;
faceres: typeof faceres;
};
/** Face triangulation array of 468 points, used for triangle references between points */
faceTriangulation: typeof facemesh.triangulation;
/** UV map of 468 values, used for 3D mapping of the face mesh */
faceUVMap: typeof facemesh.uvmap;
/** Platform and agent information detected by Human */
sysinfo: { platform: string, agent: string };
/** Performance object that contains values for all recently performed operations */
perf: Record<string, unknown>; // perf members are dynamically defined as needed
#numTensors: number;
#analyzeMemoryLeaks: boolean;
#checkSanity: boolean;
#firstRun: boolean;
#lastInputSum: number;
#lastCacheDiff: number;
// definition end
/**
 * Creates instance of Human library that is further used for all operations
* - @param userConfig: {@link Config}
*/
constructor(userConfig: Config | Record<string, unknown> = {}) {
this.tf = tf;
this.draw = draw;
this.version = app.version;
this.config = mergeDeep(defaults, userConfig);
this.state = 'idle';
this.#numTensors = 0;
this.#analyzeMemoryLeaks = false;
this.#checkSanity = false;
this.#firstRun = true;
this.#lastCacheDiff = 0;
this.perf = {};
// object that contains all initialized models
this.models = {
face: null,
posenet: null,
blazepose: null,
efficientpose: null,
handpose: null,
iris: null,
age: null,
gender: null,
emotion: null,
embedding: null,
nanodet: null,
centernet: null,
faceres: null,
};
// export access to image processing
// @ts-ignore eslint-typescript cannot correctly infer type in anonymous function
this.image = (input: Input) => image.process(input, this.config);
// export raw access to underlying models
this.classes = {
facemesh,
emotion,
faceres,
body: this.config.body.modelPath.includes('posenet') ? posenet : blazepose,
hand: handpose,
nanodet,
centernet,
};
this.faceTriangulation = facemesh.triangulation;
this.faceUVMap = facemesh.uvmap;
// include platform info
this.sysinfo = sysinfo.info();
this.#lastInputSum = 1;
}
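// Example (illustrative sketch): creating an instance with a partial user configuration; values not
// provided are taken from `defaults` via mergeDeep, so only overrides need to be listed. The import
// path is an assumption for illustration; use whatever path or bundle exposes this class:
//   import Human from '@vladmandic/human';
//   const human = new Human({ backend: 'wasm', face: { enabled: true }, object: { enabled: false } });
//   console.log(human.version, human.config.backend);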
// helper function: measure tensor leak
/** @hidden */
analyze = (...msg) => {
if (!this.#analyzeMemoryLeaks) return;
const currentTensors = this.tf.engine().state.numTensors;
const previousTensors = this.#numTensors;
this.#numTensors = currentTensors;
const leaked = currentTensors - previousTensors;
if (leaked !== 0) log(...msg, leaked);
}
// quick sanity check on inputs
/** @hidden */
#sanity = (input): null | string => {
if (!this.#checkSanity) return null;
if (!input) return 'input is not defined';
if (this.tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) return 'input must be a tensor';
try {
this.tf.getBackend();
} catch {
return 'backend not loaded';
}
return null;
}
/** Similarity method calculates similarity between two provided face descriptors (face embeddings)
 * - Calculation is based on normalized Minkowski distance between the two descriptors
*/
// eslint-disable-next-line class-methods-use-this
similarity(embedding1: Array<number>, embedding2: Array<number>): number {
return faceres.similarity(embedding1, embedding2);
}
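// Example (illustrative sketch): comparing descriptors of two detected faces; assumes each face
// result exposes its descriptor as an `embedding` number array, which is the descriptor this class
// refers to elsewhere:
//   const resA = await human.detect(imageA);
//   const resB = await human.detect(imageB);
//   if (!('error' in resA) && !('error' in resB)) {
//     const score = human.similarity(resA.face[0].embedding, resB.face[0].embedding);
//     console.log('face similarity:', score); // higher value means more similar descriptors
//   }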
/** Enhance method performs additional enhancements to a previously detected face image for further processing
* @param input Tensor as provided in human.result.face[n].tensor
* @returns Tensor
*/
// eslint-disable-next-line class-methods-use-this
enhance(input: Tensor): Tensor | null {
// @ts-ignore type mismatch for Tensor
return faceres.enhance(input);
}
/**
 * Match method finds the best match between a provided face descriptor and a predefined database of known descriptors
 * @param faceEmbedding: face descriptor previously calculated on any face
* @param db: array of mapping of face descriptors to known values
* @param threshold: minimum score for matching to be considered in the result
* @returns best match
*/
// eslint-disable-next-line class-methods-use-this
match(faceEmbedding: Array<number>, db: Array<{ name: string, source: string, embedding: number[] }>, threshold = 0): { name: string, source: string, similarity: number, embedding: number[] } {
return faceres.match(faceEmbedding, db, threshold);
}
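// Example (illustrative sketch): matching a freshly computed descriptor against a database of known
// faces; entries follow the { name, source, embedding } shape from the signature above, and
// `currentEmbedding`, `aliceEmbedding`, `bobEmbedding` are placeholders for stored descriptors:
//   const db = [
//     { name: 'alice', source: 'alice.jpg', embedding: aliceEmbedding },
//     { name: 'bob', source: 'bob.jpg', embedding: bobEmbedding },
//   ];
//   const best = human.match(currentEmbedding, db, 0.5);
//   console.log(`closest match: ${best.name} with similarity ${best.similarity}`);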
/** Load method preloads all configured models on-demand
 * - Not explicitly required as any required model is loaded implicitly on its first run
*/
async load(userConfig: Config | Record<string, unknown> = {}) {
this.state = 'load';
const timeStamp = now();
if (userConfig) this.config = mergeDeep(this.config, userConfig);
if (this.#firstRun) { // print version info on first run and check for correct backend setup
if (this.config.debug) log(`version: ${this.version}`);
if (this.config.debug) log(`tfjs version: ${this.tf.version_core}`);
if (this.config.debug) log('platform:', this.sysinfo.platform);
if (this.config.debug) log('agent:', this.sysinfo.agent);
await this.#checkBackend(true);
if (this.tf.ENV.flags.IS_BROWSER) {
if (this.config.debug) log('configuration:', this.config);
if (this.config.debug) log('tf flags:', this.tf.ENV.flags);
}
}
if (this.config.async) { // load models concurrently
[
this.models.face,
this.models.emotion,
this.models.handpose,
this.models.posenet,
this.models.blazepose,
this.models.nanodet,
this.models.centernet,
this.models.faceres,
] = await Promise.all([
this.models.face || (this.config.face.enabled ? facemesh.load(this.config) : null),
this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
this.models.posenet || (this.config.body.enabled && this.config.body.modelPath.includes('posenet') ? posenet.load(this.config) : null),
this.models.blazepose || (this.config.body.enabled && this.config.body.modelPath.includes('blazepose') ? blazepose.load(this.config) : null),
this.models.nanodet || (this.config.object.enabled && this.config.object.modelPath.includes('nanodet') ? nanodet.load(this.config) : null),
this.models.centernet || (this.config.object.enabled && this.config.object.modelPath.includes('centernet') ? centernet.load(this.config) : null),
this.models.faceres || ((this.config.face.enabled && this.config.face.description.enabled) ? faceres.load(this.config) : null),
]);
} else { // load models sequentially
if (this.config.face.enabled && !this.models.face) this.models.face = await facemesh.load(this.config);
if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config);
if (this.config.body.enabled && !this.models.posenet && this.config.body.modelPath.includes('posenet')) this.models.posenet = await posenet.load(this.config);
if (this.config.body.enabled && !this.models.blazepose && this.config.body.modelPath.includes('blazepose')) this.models.blazepose = await blazepose.load(this.config);
if (this.config.object.enabled && !this.models.nanodet && this.config.object.modelPath.includes('nanodet')) this.models.nanodet = await nanodet.load(this.config);
if (this.config.object.enabled && !this.models.centernet && this.config.object.modelPath.includes('centernet')) this.models.centernet = await centernet.load(this.config);
if (this.config.face.enabled && this.config.face.description.enabled && !this.models.faceres) this.models.faceres = await faceres.load(this.config);
}
if (this.#firstRun) { // print memory stats on first run
if (this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors');
this.#firstRun = false;
}
const current = Math.trunc(now() - timeStamp);
if (current > (this.perf.load as number || 0)) this.perf.load = current;
}
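// Example (illustrative sketch): preloading models during application startup so the first call to
// detect() does not pay the model download and initialization cost:
//   const human = new Human({ face: { enabled: true }, body: { enabled: true } });
//   await human.load(); // loads only the models enabled in the current configuration
//   console.log('tensors after load:', human.tf.engine().state.numTensors);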
// check if backend needs initialization if it changed
/** @hidden */
#checkBackend = async (force = false) => {
if (this.config.backend && (this.config.backend.length > 0) && force || (this.tf.getBackend() !== this.config.backend)) {
const timeStamp = now();
this.state = 'backend';
/* force backend reload
if (this.config.backend in tf.engine().registry) {
const backendFactory = tf.findBackendFactory(this.config.backend);
tf.removeBackend(this.config.backend);
tf.registerBackend(this.config.backend, backendFactory);
} else {
log('Backend not registred:', this.config.backend);
}
*/
if (this.config.backend && this.config.backend.length > 0) {
// @ts-ignore ignore missing type for WorkerGlobalScope as that is the point
if (typeof window === 'undefined' && typeof WorkerGlobalScope !== 'undefined' && this.config.debug) log('running inside web worker');
// force browser vs node backend
if (this.tf.ENV.flags.IS_BROWSER && this.config.backend === 'tensorflow') this.config.backend = 'webgl';
if (this.tf.ENV.flags.IS_NODE && (this.config.backend === 'webgl' || this.config.backend === 'humangl')) this.config.backend = 'tensorflow';
if (this.config.debug) log('setting backend:', this.config.backend);
if (this.config.backend === 'wasm') {
if (this.config.debug) log('wasm path:', this.config.wasmPath);
if (typeof this.tf?.setWasmPaths !== 'undefined') this.tf.setWasmPaths(this.config.wasmPath);
else throw new Error('Human: WASM backend is not loaded');
const simd = await this.tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
const mt = await this.tf.env().getAsync('WASM_HAS_MULTITHREAD_SUPPORT');
if (this.config.debug) log(`wasm execution: ${simd ? 'SIMD' : 'no SIMD'} ${mt ? 'multithreaded' : 'singlethreaded'}`);
if (this.config.debug && !simd) log('warning: wasm simd support is not enabled');
}
if (this.config.backend === 'humangl') backend.register();
try {
await this.tf.setBackend(this.config.backend);
} catch (err) {
log('error: cannot set backend:', this.config.backend, err);
}
}
this.tf.enableProdMode();
// this.tf.enableDebugMode();
if (this.tf.getBackend() === 'webgl' || this.tf.getBackend() === 'humangl') {
this.tf.ENV.set('CHECK_COMPUTATION_FOR_ERRORS', false);
this.tf.ENV.set('WEBGL_CPU_FORWARD', true);
tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
this.tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
if (typeof this.config['deallocate'] !== 'undefined') {
log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', true);
this.tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', 0);
}
const gl = await this.tf.backend().getGPGPUContext().gl;
if (this.config.debug) log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);
}
await this.tf.ready();
this.perf.backend = Math.trunc(now() - timeStamp);
}
}
// check if input changed sufficiently to trigger new detections
/** @hidden */
#skipFrame = async (input) => {
if (this.config.cacheSensitivity === 0) return false;
const resizeFact = 32;
const reduced: Tensor = input.resizeBilinear([Math.trunc(input.shape[1] / resizeFact), Math.trunc(input.shape[2] / resizeFact)]);
// use tensor sum
/*
const sumT = this.tf.sum(reduced);
const sum = sumT.dataSync()[0] as number;
sumT.dispose();
*/
// use a js loop for the sum; faster than calculating the sum on the gpu and downloading the result back
const reducedData = reduced.dataSync(); // raw image rgb array
let sum = 0;
for (let i = 0; i < reducedData.length / 3; i++) sum += reducedData[3 * i + 2]; // sample a single channel (index 2) of each pixel
reduced.dispose();
const diff = 100 * (Math.max(sum, this.#lastInputSum) / Math.min(sum, this.#lastInputSum) - 1);
this.#lastInputSum = sum;
// skip frame if the change is below cacheSensitivity or below the change measured for the previous frame,
// so the cache is not reset unnecessarily on consecutive frames
const skipFrame = diff < Math.max(this.config.cacheSensitivity, this.#lastCacheDiff);
// if the difference is above 10x the threshold, discard the last value to force a cache reset on a significant scene or image change
this.#lastCacheDiff = diff > 10 * this.config.cacheSensitivity ? 0 : diff;
return skipFrame;
}
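// Worked example of the caching heuristic above (illustrative numbers): with cacheSensitivity = 0.5
// and a previous channel sum of 10000, a new sum of 10030 gives diff = 100 * (10030 / 10000 - 1) = 0.3,
// which is below the threshold, so the frame is skipped; a new sum of 12000 gives diff = 20, the frame
// is processed, and since 20 > 10 * 0.5 the stored #lastCacheDiff resets to 0, so the next frame is
// evaluated against cacheSensitivity alone.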
/** Main detection method
* - Analyze configuration: {@link Config}
* - Pre-process input: {@link Input}
* - Run inference for all configured models
* - Process and return result: {@link Result}
*/
async detect(input: Input, userConfig: Config | Record<string, unknown> = {}): Promise<Result | Error> {
// detection happens inside a promise
return new Promise(async (resolve) => {
this.state = 'config';
let timeStamp;
// update configuration
this.config = mergeDeep(this.config, userConfig);
// sanity checks
this.state = 'check';
const error = this.#sanity(input);
if (error) {
log(error, input);
resolve({ error });
}
const timeStart = now();
// configure backend
await this.#checkBackend();
// load models if enabled
await this.load();
/*
// function disabled in favor of inputChanged
// disable video optimization for inputs of type image, but skip if inside worker thread
let previousVideoOptimized;
// @ts-ignore ignore missing type for WorkerGlobalScope as that is the point
if (input && this.config.videoOptimized && (typeof window !== 'undefined') && (typeof WorkerGlobalScope !== 'undefined') && (
(typeof HTMLImageElement !== 'undefined' && input instanceof HTMLImageElement)
|| (typeof Image !== 'undefined' && input instanceof Image)
|| (typeof ImageData !== 'undefined' && input instanceof ImageData)
|| (typeof ImageBitmap !== 'undefined' && image instanceof ImageBitmap))
) {
log('disabling video optimization');
previousVideoOptimized = this.config.videoOptimized;
this.config.videoOptimized = false;
}
*/
timeStamp = now();
const process = image.process(input, this.config);
if (!process || !process.tensor) {
log('could not convert input to tensor');
resolve({ error: 'could not convert input to tensor' });
return;
}
this.perf.image = Math.trunc(now() - timeStamp);
this.analyze('Get Image:');
timeStamp = now();
// @ts-ignore hidden dynamic property that is not part of definitions
this.config.skipFrame = await this.#skipFrame(process.tensor);
if (!this.perf.frames) this.perf.frames = 0;
if (!this.perf.cached) this.perf.cached = 0;
(this.perf.frames as number)++;
// @ts-ignore hidden dynamic property that is not part of definitions
if (this.config.skipFrame) this.perf.cached++;
this.perf.changed = Math.trunc(now() - timeStamp);
this.analyze('Check Changed:');
// prepare where to store model results
// keep them with weak typing as it can be promise or not
let faceRes;
let bodyRes;
let handRes;
let objectRes;
let elapsedTime;
// run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
if (this.config.async) {
faceRes = this.config.face.enabled ? face.detectFace(this, process.tensor) : [];
if (this.perf.face) delete this.perf.face;
} else {
this.state = 'run:face';
timeStamp = now();
faceRes = this.config.face.enabled ? await face.detectFace(this, process.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.perf.face = elapsedTime;
}
// run body: can be posenet or blazepose
this.analyze('Start Body:');
if (this.config.async) {
if (this.config.body.modelPath.includes('posenet')) bodyRes = this.config.body.enabled ? posenet.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('blazepose')) bodyRes = this.config.body.enabled ? blazepose.predict(process.tensor, this.config) : [];
if (this.perf.body) delete this.perf.body;
} else {
this.state = 'run:body';
timeStamp = now();
if (this.config.body.modelPath.includes('posenet')) bodyRes = this.config.body.enabled ? await posenet.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(process.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.perf.body = elapsedTime;
}
this.analyze('End Body:');
// run handpose
this.analyze('Start Hand:');
if (this.config.async) {
handRes = this.config.hand.enabled ? handpose.predict(process.tensor, this.config) : [];
if (this.perf.hand) delete this.perf.hand;
} else {
this.state = 'run:hand';
timeStamp = now();
handRes = this.config.hand.enabled ? await handpose.predict(process.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.perf.hand = elapsedTime;
}
this.analyze('End Hand:');
// run object detection: can be nanodet or centernet
this.analyze('Start Object:');
if (this.config.async) {
if (this.config.object.modelPath.includes('nanodet')) objectRes = this.config.object.enabled ? nanodet.predict(process.tensor, this.config) : [];
else if (this.config.object.modelPath.includes('centernet')) objectRes = this.config.object.enabled ? centernet.predict(process.tensor, this.config) : [];
if (this.perf.object) delete this.perf.object;
} else {
this.state = 'run:object';
timeStamp = now();
if (this.config.object.modelPath.includes('nanodet')) objectRes = this.config.object.enabled ? await nanodet.predict(process.tensor, this.config) : [];
else if (this.config.object.modelPath.includes('centernet')) objectRes = this.config.object.enabled ? await centernet.predict(process.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.perf.object = elapsedTime;
}
this.analyze('End Object:');
// if async wait for results
if (this.config.async) [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
// run gesture analysis last
let gestureRes: Gesture[] = [];
if (this.config.gesture.enabled) {
timeStamp = now();
gestureRes = [...gesture.face(faceRes), ...gesture.body(bodyRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);
else if (this.perf.gesture) delete this.perf.gesture;
}
this.perf.total = Math.trunc(now() - timeStart);
this.state = 'idle';
const res = {
face: faceRes,
body: bodyRes,
hand: handRes,
gesture: gestureRes,
object: objectRes,
performance: this.perf,
canvas: process.canvas,
timestamp: Date.now(),
get persons() { return persons.join(faceRes, bodyRes, handRes, gestureRes, process?.tensor?.shape); },
};
// finally dispose input tensor
tf.dispose(process.tensor);
// log('Result:', result);
resolve(res);
});
}
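// Example (illustrative sketch): continuous detection of a playing video element in the browser;
// `video` and `canvas` are assumed page elements and the draw.all signature is assumed as (canvas, result):
//   async function runLoop() {
//     const result = await human.detect(video);
//     if (!('error' in result)) await human.draw.all(canvas, result);
//     requestAnimationFrame(runLoop);
//   }
//   runLoop();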
/** @hidden */
#warmupBitmap = async () => {
const b64toBlob = (base64, type = 'application/octet-stream') => fetch(`data:${type};base64,${base64}`).then((res) => res.blob());
let blob;
let res;
switch (this.config.warmup) {
case 'face': blob = await b64toBlob(sample.face); break;
case 'full': blob = await b64toBlob(sample.body); break;
default: blob = null;
}
if (blob) {
const bitmap = await createImageBitmap(blob);
res = await this.detect(bitmap, this.config);
bitmap.close();
}
return res;
}
/** @hidden */
#warmupCanvas = async () => new Promise((resolve) => {
let src;
let size = 0;
switch (this.config.warmup) {
case 'face':
size = 256;
src = 'data:image/jpeg;base64,' + sample.face;
break;
case 'full':
case 'body':
size = 1200;
src = 'data:image/jpeg;base64,' + sample.body;
break;
default:
src = null;
}
// src = encodeURI('../assets/human-sample-upper.jpg');
const img = new Image();
img.onload = async () => {
const canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(size, size) : document.createElement('canvas');
canvas.width = img.naturalWidth;
canvas.height = img.naturalHeight;
const ctx = canvas.getContext('2d');
ctx?.drawImage(img, 0, 0);
// const data = ctx?.getImageData(0, 0, canvas.height, canvas.width);
const res = await this.detect(canvas, this.config);
resolve(res);
};
if (src) img.src = src;
else resolve(null);
});
/** @hidden */
#warmupNode = async () => {
const atob = (str) => Buffer.from(str, 'base64');
let img;
if (this.config.warmup === 'face') img = atob(sample.face);
if (this.config.warmup === 'body' || this.config.warmup === 'full') img = atob(sample.body);
if (!img) return null;
let res;
if (typeof tf['node'] !== 'undefined') {
const data = tf['node'].decodeJpeg(img);
const expanded = data.expandDims(0);
this.tf.dispose(data);
// log('Input:', expanded);
res = await this.detect(expanded, this.config);
this.tf.dispose(expanded);
} else {
if (this.config.debug) log('Warmup tfjs-node not loaded');
/*
const input = await canvasJS.loadImage(img);
const canvas = canvasJS.createCanvas(input.width, input.height);
const ctx = canvas.getContext('2d');
ctx.drawImage(img, 0, 0, input.width, input.height);
res = await this.detect(input, this.config);
*/
}
return res;
}
/** Warmup method pre-initializes all models for faster inference
* - can take significant time on startup
* - only used for `webgl` and `humangl` backends
*/
async warmup(userConfig: Config | Record<string, unknown> = {}): Promise<Result | { error }> {
const t0 = now();
if (userConfig) this.config = mergeDeep(this.config, userConfig);
if (!this.config.warmup || this.config.warmup === 'none') return { error: 'null' };
let res;
if (typeof createImageBitmap === 'function') res = await this.#warmupBitmap();
else if (typeof Image !== 'undefined') res = await this.#warmupCanvas();
else res = await this.#warmupNode();
const t1 = now();
if (this.config.debug) log('Warmup', this.config.warmup, Math.round(t1 - t0), 'ms', res);
return res;
}
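// Example (illustrative sketch): warming up once after load so the first real detection does not pay
// shader compilation and memory allocation costs on the webgl/humangl backends:
//   await human.load();
//   const warmupResult = await human.warmup({ warmup: 'face' }); // runs detection on an embedded sample image
//   console.log('warmup completed', warmupResult);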
}
/**
* Class Human is also available as default export
*/
export { Human as default };