/**
* Human main module
*/
2021-05-05 16:07:44 +02:00
import { Config } from './config';
2021-09-12 05:54:35 +02:00
import { Result } from './result';
2021-03-17 23:57:00 +01:00
import * as tf from '../dist/tfjs.esm.js';
import * as facemesh from './blazeface/facemesh';
import * as draw from './draw/draw';
2021-09-12 18:42:17 +02:00
import * as env from './env';
2021-06-18 15:16:21 +02:00
import { Tensor, GraphModel } from './tfjs/types';
2021-09-12 05:54:35 +02:00
export * from './config';
export * from './result';
2021-04-13 17:05:52 +02:00
export type { DrawOptions } from './draw/draw';
2021-09-12 18:42:17 +02:00
export { env } from './env';
2021-05-31 00:45:39 +02:00
/** Defines all possible input types for **Human** detection
* @typedef Input Type
2021-05-31 00:45:39 +02:00
*/
2021-04-06 17:38:01 +02:00
export declare type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
2021-09-12 06:30:11 +02:00
/** Events dispatched by `human.events`
* - `create`: triggered when Human object is instantiated
* - `load`: triggered when models are loaded (explicitly or on-demand)
* - `image`: triggered when input image is this.processed
* - `result`: triggered when detection is complete
* - `warmup`: triggered when warmup is complete
*/
export declare type Events = 'create' | 'load' | 'image' | 'result' | 'warmup';
2021-05-31 00:45:39 +02:00
/** Error message
* @typedef Error Type
2021-05-31 00:45:39 +02:00
*/
2021-03-17 23:57:00 +01:00
export declare type Error = {
2021-03-18 01:16:40 +01:00
error: string;
2021-03-17 23:57:00 +01:00
};
2021-06-07 01:00:34 +02:00
/** Instance of TensorFlow/JS
* @external
*/
2021-03-17 23:57:00 +01:00
export declare type TensorFlow = typeof tf;
/**
* **Human** library main class
*
* All methods and properties are available only as members of Human class
*
* - Configuration object definition: {@link Config}
* - Results object definition: {@link Result}
* - Possible inputs: {@link Input}
2021-05-31 00:45:39 +02:00
*
* @param userConfig: {@link Config}
2021-03-17 23:57:00 +01:00
*/
export declare class Human {
#private;
/** Current version of Human library in *semver* format */
2021-09-11 03:21:29 +02:00
version: string;
2021-04-13 17:05:52 +02:00
/** Current configuration
* - Details: {@link Config}
*/
2021-03-17 23:57:00 +01:00
config: Config;
2021-05-30 00:29:57 +02:00
/** Last known result of detect run
* - Can be accessed anytime after initial detection
*/
result: Result;
2021-04-13 17:05:52 +02:00
/** Current state of Human library
* - Can be polled to determine operations that are currently executed
* - Progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle'
2021-04-13 17:05:52 +02:00
*/
2021-03-18 01:16:40 +01:00
state: string;
2021-09-11 22:11:00 +02:00
/** currenty processed image tensor and canvas */
process: {
2021-04-13 17:05:52 +02:00
tensor: Tensor | null;
canvas: OffscreenCanvas | HTMLCanvasElement | null;
2021-03-17 23:57:00 +01:00
};
2021-05-31 00:45:39 +02:00
/** @internal: Instance of TensorFlow/JS used by Human
2021-04-13 17:05:52 +02:00
* - Can be embedded or externally provided
*/
2021-03-17 23:57:00 +01:00
tf: TensorFlow;
2021-09-12 18:42:17 +02:00
/**
* Object containing environment information used for diagnostics
*/
env: env.Env;
2021-06-18 15:16:21 +02:00
/** Draw helper classes that can draw detected objects on canvas using specified draw
* - options: {@link DrawOptions} global settings for all draw operations, can be overriden for each draw method
2021-04-13 17:05:52 +02:00
* - face: draw detected faces
* - body: draw detected people and body parts
* - hand: draw detected hands and hand parts
2021-09-11 22:11:00 +02:00
* - canvas: draw this.processed canvas which is a this.processed copy of the input
2021-04-13 17:05:52 +02:00
* - all: meta-function that performs: canvas, face, body, hand
*/
2021-03-17 23:57:00 +01:00
draw: {
2021-04-13 17:05:52 +02:00
options: draw.DrawOptions;
2021-03-17 23:57:00 +01:00
gesture: typeof draw.gesture;
face: typeof draw.face;
body: typeof draw.body;
hand: typeof draw.hand;
canvas: typeof draw.canvas;
all: typeof draw.all;
};
2021-05-31 00:45:39 +02:00
/** @internal: Currently loaded models */
2021-03-17 23:57:00 +01:00
models: {
2021-06-18 15:16:21 +02:00
face: [unknown, GraphModel | null, GraphModel | null] | null;
posenet: GraphModel | null;
blazepose: GraphModel | null;
efficientpose: GraphModel | null;
movenet: GraphModel | null;
handpose: [GraphModel | null, GraphModel | null] | null;
age: GraphModel | null;
gender: GraphModel | null;
emotion: GraphModel | null;
embedding: GraphModel | null;
nanodet: GraphModel | null;
centernet: GraphModel | null;
faceres: GraphModel | null;
segmentation: GraphModel | null;
2021-03-17 23:57:00 +01:00
};
2021-09-11 22:11:00 +02:00
/** Container for events dispatched by Human
*
* Possible events:
* - `create`: triggered when Human object is instantiated
* - `load`: triggered when models are loaded (explicitly or on-demand)
* - `image`: triggered when input image is this.processed
* - `result`: triggered when detection is complete
* - `warmup`: triggered when warmup is complete
*/
events: EventTarget;
/** Reference face triangualtion array of 468 points, used for triangle references between points */
2021-03-29 21:59:16 +02:00
faceTriangulation: typeof facemesh.triangulation;
/** Refernce UV map of 468 values, used for 3D mapping of the face mesh */
2021-03-29 21:59:16 +02:00
faceUVMap: typeof facemesh.uvmap;
2021-04-13 17:05:52 +02:00
/** Performance object that contains values for all recently performed operations */
2021-06-03 15:41:53 +02:00
performance: Record<string, number>;
initial: boolean;
2021-04-13 17:05:52 +02:00
/**
* Creates instance of Human library that is futher used for all operations
2021-05-31 00:45:39 +02:00
* @param userConfig: {@link Config}
2021-04-13 17:05:52 +02:00
*/
2021-09-11 22:11:00 +02:00
constructor(userConfig?: Partial<Config>);
2021-03-21 12:49:55 +01:00
/** @hidden */
2021-08-17 14:51:17 +02:00
analyze: (...msg: string[]) => void;
/** Process input as return canvas and tensor
*
* @param input: {@link Input}
* @returns { tensor, canvas }
*/
image: (input: Input) => {
tensor: Tensor<import("@tensorflow/tfjs-core").Rank> | null;
canvas: OffscreenCanvas | HTMLCanvasElement;
};
2021-04-13 17:05:52 +02:00
/** Simmilarity method calculates simmilarity between two provided face descriptors (face embeddings)
* - Calculation is based on normalized Minkowski distance between
*
2021-05-31 00:45:39 +02:00
* @param embedding1: face descriptor as array of numbers
* @param embedding2: face descriptor as array of numbers
* @returns similarity: number
2021-04-13 17:05:52 +02:00
*/
2021-03-21 19:18:51 +01:00
similarity(embedding1: Array<number>, embedding2: Array<number>): number;
/**
2021-09-11 22:11:00 +02:00
* Segmentation method takes any input and returns this.processed canvas with body segmentation
* Optional parameter background is used to fill the background with specific input
2021-09-11 22:11:00 +02:00
* Segmentation is not triggered as part of detect this.process
*
* @param input: {@link Input}
* @param background?: {@link Input}
* @returns Canvas
*/
segmentation(input: Input, background?: Input): Promise<OffscreenCanvas | HTMLCanvasElement | null>;
2021-09-11 22:11:00 +02:00
/** Enhance method performs additional enhacements to face image previously detected for futher this.processing
2021-05-31 00:45:39 +02:00
* @param input: Tensor as provided in human.result.face[n].tensor
2021-04-13 17:05:52 +02:00
* @returns Tensor
*/
2021-03-17 23:57:00 +01:00
enhance(input: Tensor): Tensor | null;
2021-05-31 00:45:39 +02:00
/** Math method find best match between provided face descriptor and predefined database of known descriptors
2021-04-13 17:05:52 +02:00
* @param faceEmbedding: face descriptor previsouly calculated on any face
* @param db: array of mapping of face descriptors to known values
* @param threshold: minimum score for matching to be considered in the result
* @returns best match
*/
2021-03-18 01:16:40 +01:00
match(faceEmbedding: Array<number>, db: Array<{
name: string;
source: string;
embedding: number[];
2021-03-17 23:57:00 +01:00
}>, threshold?: number): {
2021-03-18 01:16:40 +01:00
name: string;
source: string;
2021-03-21 19:18:51 +01:00
similarity: number;
2021-03-18 01:16:40 +01:00
embedding: number[];
2021-03-17 23:57:00 +01:00
};
2021-04-13 17:05:52 +02:00
/** Load method preloads all configured models on-demand
* - Not explicitly required as any required model is load implicitly on it's first run
* @param userConfig?: {@link Config}
2021-04-13 17:05:52 +02:00
*/
2021-09-11 22:11:00 +02:00
load(userConfig?: Partial<Config>): Promise<void>;
/** @hidden */
emit: (event: string) => boolean;
/**
* Runs interpolation using last known result and returns smoothened result
* Interpolation is based on time since last known result so can be called independently
*
* @param result?: {@link Result} optional use specific result set to run interpolation on
* @returns result: {@link Result}
*/
next: (result?: Result | undefined) => Result;
/** Warmup method pre-initializes all configured models for faster inference
* - can take significant time on startup
* - only used for `webgl` and `humangl` backends
* @param userConfig?: {@link Config}
*/
warmup: (userConfig?: Partial<Config> | undefined) => Promise<Result | {
error: any;
}>;
2021-04-13 17:05:52 +02:00
/** Main detection method
* - Analyze configuration: {@link Config}
2021-09-11 22:11:00 +02:00
* - Pre-this.process input: {@link Input}
2021-04-13 17:05:52 +02:00
* - Run inference for all configured models
* - Process and return result: {@link Result}
*
* @param input: {@link Input}
* @param userConfig?: {@link Config}
* @returns result: {@link Result}
2021-04-13 17:05:52 +02:00
*/
2021-09-11 22:11:00 +02:00
detect(input: Input, userConfig?: Partial<Config>): Promise<Result | Error>;
2021-03-17 23:57:00 +01:00
}
/**
* Class Human is also available as default export
*/
export { Human as default };