/**
 * Type definitions for the Human library (human/types/human.d.ts)
 */
import * as tf from '../dist/tfjs.esm.js';
import * as facemesh from './blazeface/facemesh';
import * as faceres from './faceres/faceres';
import * as emotion from './emotion/emotion';
import * as posenet from './posenet/posenet';
import * as handpose from './handpose/handpose';
import * as blazepose from './blazepose/blazepose';
import * as nanodet from './nanodet/nanodet';
import * as draw from './draw/draw';
import { Config } from './config';
import { Result } from './result';
2021-03-18 01:26:43 +01:00
/** Generic Tensor object type */
export declare type Tensor = typeof tf.Tensor;
2021-03-17 23:57:00 +01:00
export type { Config } from './config';
export type { Result } from './result';
2021-04-13 17:05:52 +02:00
export type { DrawOptions } from './draw/draw';
2021-03-17 23:57:00 +01:00
/** Defines all possible input types for **Human** detection */
2021-04-06 17:38:01 +02:00
export declare type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
2021-03-17 23:57:00 +01:00
/** Error message */
export declare type Error = {
2021-03-18 01:16:40 +01:00
error: string;
2021-03-17 23:57:00 +01:00
};
2021-03-18 01:26:43 +01:00
/** Instance of TensorFlow/JS */
2021-03-17 23:57:00 +01:00
export declare type TensorFlow = typeof tf;
2021-03-18 01:26:43 +01:00
/** Generic Model object type, holds instance of individual models */
declare type Model = Object;
2021-03-17 23:57:00 +01:00
/**
* **Human** library main class
*
* All methods and properties are available only as members of Human class
*
* - Configuration object definition: {@link Config}
* - Results object definition: {@link Result}
* - Possible inputs: {@link Input}
*/
export declare class Human {
#private;
2021-04-13 17:05:52 +02:00
/** Current version of Human library in semver format */
2021-03-18 01:16:40 +01:00
version: string;
2021-04-13 17:05:52 +02:00
/** Current configuration
* - Details: {@link Config}
*/
2021-03-17 23:57:00 +01:00
config: Config;
2021-04-13 17:05:52 +02:00
/** Current state of Human library
* - Can be polled to determine operations that are currently executed
*/
2021-03-18 01:16:40 +01:00
state: string;
2021-04-13 17:05:52 +02:00
/** Internal: Instance of current image being processed */
2021-03-17 23:57:00 +01:00
image: {
2021-04-13 17:05:52 +02:00
tensor: Tensor | null;
canvas: OffscreenCanvas | HTMLCanvasElement | null;
2021-03-17 23:57:00 +01:00
};
2021-04-13 17:05:52 +02:00
/** Internal: Instance of TensorFlow/JS used by Human
* - Can be embedded or externally provided
*/
2021-03-17 23:57:00 +01:00
tf: TensorFlow;
2021-04-13 17:05:52 +02:00
/** Draw helper classes that can draw detected objects on canvas using specified draw styles
* - options: global settings for all draw operations, can be overriden for each draw method, for details see {@link DrawOptions}
* - face: draw detected faces
* - body: draw detected people and body parts
* - hand: draw detected hands and hand parts
* - canvas: draw processed canvas which is a processed copy of the input
* - all: meta-function that performs: canvas, face, body, hand
*/
2021-03-17 23:57:00 +01:00
draw: {
2021-04-13 17:05:52 +02:00
options: draw.DrawOptions;
2021-03-17 23:57:00 +01:00
gesture: typeof draw.gesture;
face: typeof draw.face;
body: typeof draw.body;
hand: typeof draw.hand;
canvas: typeof draw.canvas;
all: typeof draw.all;
};
2021-04-13 17:05:52 +02:00
/** Internal: Currently loaded models */
2021-03-17 23:57:00 +01:00
models: {
2021-03-26 23:50:19 +01:00
face: facemesh.MediaPipeFaceMesh | Model | null;
2021-04-24 22:04:49 +02:00
posenet: Model | null;
2021-03-17 23:57:00 +01:00
blazepose: Model | null;
2021-03-26 23:50:19 +01:00
efficientpose: Model | null;
2021-03-17 23:57:00 +01:00
handpose: handpose.HandPose | null;
iris: Model | null;
age: Model | null;
gender: Model | null;
emotion: Model | null;
embedding: Model | null;
nanodet: Model | null;
2021-03-21 19:18:51 +01:00
faceres: Model | null;
2021-03-17 23:57:00 +01:00
};
2021-04-13 17:05:52 +02:00
/** Internal: Currently loaded classes */
2021-03-17 23:57:00 +01:00
classes: {
facemesh: typeof facemesh;
emotion: typeof emotion;
body: typeof posenet | typeof blazepose;
hand: typeof handpose;
nanodet: typeof nanodet;
2021-03-21 19:18:51 +01:00
faceres: typeof faceres;
2021-03-17 23:57:00 +01:00
};
2021-04-13 17:05:52 +02:00
/** Face triangualtion array of 468 points, used for triangle references between points */
2021-03-29 21:59:16 +02:00
faceTriangulation: typeof facemesh.triangulation;
2021-04-13 17:05:52 +02:00
/** UV map of 468 values, used for 3D mapping of the face mesh */
2021-03-29 21:59:16 +02:00
faceUVMap: typeof facemesh.uvmap;
2021-04-13 17:05:52 +02:00
/** Platform and agent information detected by Human */
2021-03-17 23:57:00 +01:00
sysinfo: {
2021-03-18 01:16:40 +01:00
platform: string;
agent: string;
2021-03-17 23:57:00 +01:00
};
2021-04-13 17:05:52 +02:00
/** Performance object that contains values for all recently performed operations */
2021-03-21 12:49:55 +01:00
perf: any;
2021-04-13 17:05:52 +02:00
/**
* Creates instance of Human library that is futher used for all operations
* - @param userConfig: {@link Config}
*/
2021-03-17 23:57:00 +01:00
constructor(userConfig?: Config | Object);
2021-04-13 17:05:52 +02:00
/** Internal: ProfileData method returns last known profiling information
* - Requires human.config.profile set to true
*/
2021-03-17 23:57:00 +01:00
profileData(): {
newBytes: any;
newTensors: any;
peakBytes: any;
numKernelOps: any;
timeKernelOps: any;
slowestKernelOps: any;
largestKernelOps: any;
} | {};
2021-03-21 12:49:55 +01:00
/** @hidden */
analyze: (...msg: any[]) => void;
2021-04-13 17:05:52 +02:00
/** Simmilarity method calculates simmilarity between two provided face descriptors (face embeddings)
* - Calculation is based on normalized Minkowski distance between
*/
2021-03-21 19:18:51 +01:00
similarity(embedding1: Array<number>, embedding2: Array<number>): number;
2021-04-13 17:05:52 +02:00
/** Enhance method performs additional enhacements to face image previously detected for futher processing
* @param input Tensor as provided in human.result.face[n].tensor
* @returns Tensor
*/
2021-03-17 23:57:00 +01:00
enhance(input: Tensor): Tensor | null;
2021-04-13 17:05:52 +02:00
/**
* Math method find best match between provided face descriptor and predefined database of known descriptors
* @param faceEmbedding: face descriptor previsouly calculated on any face
* @param db: array of mapping of face descriptors to known values
* @param threshold: minimum score for matching to be considered in the result
* @returns best match
*/
2021-03-18 01:16:40 +01:00
match(faceEmbedding: Array<number>, db: Array<{
name: string;
source: string;
embedding: number[];
2021-03-17 23:57:00 +01:00
}>, threshold?: number): {
2021-03-18 01:16:40 +01:00
name: string;
source: string;
2021-03-21 19:18:51 +01:00
similarity: number;
2021-03-18 01:16:40 +01:00
embedding: number[];
2021-03-17 23:57:00 +01:00
};
2021-04-13 17:05:52 +02:00
/** Load method preloads all configured models on-demand
* - Not explicitly required as any required model is load implicitly on it's first run
*/
2021-03-17 23:57:00 +01:00
load(userConfig?: Config | Object): Promise<void>;
2021-04-13 17:05:52 +02:00
/** Main detection method
* - Analyze configuration: {@link Config}
* - Pre-process input: {@link Input}
* - Run inference for all configured models
* - Process and return result: {@link Result}
*/
2021-03-17 23:57:00 +01:00
detect(input: Input, userConfig?: Config | Object): Promise<Result | Error>;
2021-04-13 17:05:52 +02:00
/** Warmup metho pre-initializes all models for faster inference
* - can take significant time on startup
* - only used for `webgl` and `humangl` backends
*/
2021-03-17 23:57:00 +01:00
warmup(userConfig?: Config | Object): Promise<Result | {
error: any;
}>;
}
/**
* Class Human is also available as default export
*/
export { Human as default };