// human/types/src/config.d.ts
export interface GenericConfig {
enabled: boolean;
2021-09-12 06:05:06 +02:00
modelPath: string;
2021-10-22 22:09:52 +02:00
skipFrames: number;
skipTime: number;
}
/** Dectector part of face configuration */
export interface FaceDetectorConfig extends GenericConfig {
2021-09-12 06:05:06 +02:00
rotation: boolean;
maxDetected: number;
minConfidence: number;
iouThreshold: number;
return: boolean;
}
2021-09-22 22:00:43 +02:00
/** Mesh part of face configuration */
2021-10-22 22:09:52 +02:00
export declare type FaceMeshConfig = GenericConfig;
2021-09-22 22:00:43 +02:00
/** Iris part of face configuration */
2021-10-22 22:09:52 +02:00
export declare type FaceIrisConfig = GenericConfig;
2021-09-22 22:00:43 +02:00
/** Description or face embedding part of face configuration
* - also used by age and gender detection
*/
2021-10-22 22:09:52 +02:00
export interface FaceDescriptionConfig extends GenericConfig {
2021-09-12 06:05:06 +02:00
minConfidence: number;
}
2021-09-22 22:00:43 +02:00
/** Emotion part of face configuration */
2021-10-22 22:09:52 +02:00
export interface FaceEmotionConfig extends GenericConfig {
2021-09-12 06:05:06 +02:00
minConfidence: number;
}
2021-10-13 16:56:56 +02:00
/** Emotion part of face configuration */
2021-10-22 22:09:52 +02:00
export declare type FaceAntiSpoofConfig = GenericConfig;
2021-09-12 05:54:35 +02:00
/** Controlls and configures all face-specific options:
* - face detection, face mesh detection, age, gender, emotion detection and face description
2021-09-22 22:00:43 +02:00
*
2021-09-12 05:54:35 +02:00
* Parameters:
* - enabled: true/false
* - modelPath: path for each of face models
* - minConfidence: threshold for discarding a prediction
* - iouThreshold: ammount of overlap between two detected objects before one object is removed
* - maxDetected: maximum number of faces detected in the input, should be set to the minimum number for performance
* - rotation: use calculated rotated face image or just box with rotation as-is, false means higher performance, but incorrect mesh mapping on higher face angles
* - return: return extracted face as tensor for futher user processing, in which case user is reponsible for manually disposing the tensor
*/
export interface FaceConfig {
enabled: boolean;
2021-09-12 06:05:06 +02:00
detector: Partial<FaceDetectorConfig>;
mesh: Partial<FaceMeshConfig>;
iris: Partial<FaceIrisConfig>;
description: Partial<FaceDescriptionConfig>;
emotion: Partial<FaceEmotionConfig>;
2021-10-13 16:56:56 +02:00
antispoof: Partial<FaceAntiSpoofConfig>;
2021-09-12 05:54:35 +02:00
}
/** Controlls and configures all body detection specific options
2021-09-22 22:00:43 +02:00
*
* Parameters:
2021-09-12 05:54:35 +02:00
* - enabled: true/false
* - modelPath: body pose model, can be absolute path or relative to modelBasePath
* - minConfidence: threshold for discarding a prediction
* - maxDetected: maximum number of people detected in the input, should be set to the minimum number for performance
2021-09-27 19:58:13 +02:00
* - detector: optional body detector
2021-09-22 22:00:43 +02:00
*
2021-09-26 01:14:03 +02:00
* `maxDetected` is valid for `posenet` and `movenet-multipose` as other models are single-pose only
* `maxDetected` can be set to -1 to auto-detect based on number of detected faces
*
2021-09-22 22:00:43 +02:00
* Changing `modelPath` will change module responsible for hand detection and tracking
2021-09-26 01:14:03 +02:00
* Allowed values are `posenet.json`, `blazepose.json`, `efficientpose.json`, `movenet-lightning.json`, `movenet-thunder.json`, `movenet-multipose.json`
2021-09-12 05:54:35 +02:00
*/
2021-10-22 22:09:52 +02:00
export interface BodyConfig extends GenericConfig {
2021-09-12 05:54:35 +02:00
maxDetected: number;
minConfidence: number;
2021-09-27 19:58:13 +02:00
detector?: {
modelPath: string;
};
2021-09-12 05:54:35 +02:00
}
2021-10-22 22:09:52 +02:00
/** Controls and configures all hand detection specific options
2021-09-22 22:00:43 +02:00
*
* Parameters:
2021-09-12 05:54:35 +02:00
* - enabled: true/false
* - landmarks: detect hand landmarks or just hand boundary box
* - modelPath: paths for hand detector and hand skeleton models, can be absolute path or relative to modelBasePath
* - minConfidence: threshold for discarding a prediction
* - iouThreshold: ammount of overlap between two detected objects before one object is removed
* - maxDetected: maximum number of hands detected in the input, should be set to the minimum number for performance
* - rotation: use best-guess rotated hand image or just box with rotation as-is, false means higher performance, but incorrect finger mapping if hand is inverted
2021-09-22 22:00:43 +02:00
*
2021-09-26 01:14:03 +02:00
* `maxDetected` can be set to -1 to auto-detect based on number of detected faces
*
2021-09-22 22:00:43 +02:00
* Changing `detector.modelPath` will change module responsible for hand detection and tracking
* Allowed values are `handdetect.json` and `handtrack.json`
2021-09-12 05:54:35 +02:00
*/
2021-10-22 22:09:52 +02:00
export interface HandConfig extends GenericConfig {
2021-09-12 05:54:35 +02:00
rotation: boolean;
minConfidence: number;
iouThreshold: number;
maxDetected: number;
landmarks: boolean;
detector: {
2021-09-12 06:05:06 +02:00
modelPath?: string;
2021-09-12 05:54:35 +02:00
};
skeleton: {
2021-09-12 06:05:06 +02:00
modelPath?: string;
2021-09-12 05:54:35 +02:00
};
}
/** Controlls and configures all object detection specific options
* - enabled: true/false
* - modelPath: object detection model, can be absolute path or relative to modelBasePath
* - minConfidence: minimum score that detection must have to return as valid object
* - iouThreshold: ammount of overlap between two detected objects before one object is removed
* - maxDetected: maximum number of detections to return
2021-09-22 22:00:43 +02:00
*
* Changing `modelPath` will change module responsible for hand detection and tracking
* Allowed values are `mb3-centernet.json` and `nanodet.json`
2021-09-12 05:54:35 +02:00
*/
2021-10-22 22:09:52 +02:00
export interface ObjectConfig extends GenericConfig {
2021-09-12 05:54:35 +02:00
minConfidence: number;
iouThreshold: number;
maxDetected: number;
}
/** Controlls and configures all body segmentation module
* removes background from input containing person
* if segmentation is enabled it will run as preprocessing task before any other model
* alternatively leave it disabled and use it on-demand using human.segmentation method which can
* remove background or replace it with user-provided background
*
* - enabled: true/false
* - modelPath: object detection model, can be absolute path or relative to modelBasePath
2021-09-22 22:00:43 +02:00
* - blur: blur segmentation output by <number> pixels for more realistic image
*
* Changing `modelPath` will change module responsible for hand detection and tracking
* Allowed values are `selfie.json` and `meet.json`
2021-09-12 05:54:35 +02:00
*/
export interface SegmentationConfig {
enabled: boolean;
modelPath: string;
2021-09-22 21:16:14 +02:00
blur: number;
2021-09-12 05:54:35 +02:00
}
/** Run input through image filters before inference
 * - available only in Browser environments
 * - image filters run with near-zero latency as they are executed on the GPU using WebGL
 */
export interface FilterConfig {
    enabled: boolean;
    /** Resize input width
     * - if both width and height are set to 0, there is no resizing
     * - if just one is set, second one is scaled automatically
     * - if both are set, values are used as-is
     */
    width: number;
    /** Resize input height
     * - if both width and height are set to 0, there is no resizing
     * - if just one is set, second one is scaled automatically
     * - if both are set, values are used as-is
     */
    height: number;
    /** Return processed canvas imagedata in result */
    return: boolean;
    /** Flip input as mirror image */
    flip: boolean;
    /** Range: -1 (darken) to 1 (lighten) */
    brightness: number;
    /** Range: -1 (reduce contrast) to 1 (increase contrast) */
    contrast: number;
    /** Range: 0 (no sharpening) to 1 (maximum sharpening) */
    sharpness: number;
    /** Range: 0 (no blur) to N (blur radius in pixels) */
    blur: number;
    /** Range: -1 (reduce saturation) to 1 (increase saturation) */
    saturation: number;
    /** Range: 0 (no change) to 360 (hue rotation in degrees) */
    hue: number;
    /** Image negative */
    negative: boolean;
    /** Image sepia colors */
    sepia: boolean;
    /** Image vintage colors */
    vintage: boolean;
    /** Image kodachrome colors */
    kodachrome: boolean;
    /** Image technicolor colors */
    technicolor: boolean;
    /** Image polaroid camera effect */
    polaroid: boolean;
    /** Range: 0 (no pixelate) to N (number of pixels to pixelate) */
    pixelate: number;
}
/** Controls gesture detection */
export interface GestureConfig {
    /** is gesture detection enabled? */
    enabled: boolean;
}
2021-09-12 06:30:11 +02:00
/**
* Configuration interface definition for **Human** library
*
* Contains all configurable parameters
* @typedef Config
2021-09-24 15:55:27 +02:00
*
* Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L292)
2021-09-12 06:30:11 +02:00
*/
2021-03-17 23:57:00 +01:00
export interface Config {
2021-09-24 15:55:27 +02:00
/** Backend used for TFJS operations
* Valid build-in backends are:
* - Browser: `cpu`, `wasm`, `webgl`, `humangl`
* - NodeJS: `cpu`, `wasm`, `tensorflow`
*
* Experimental:
* - Browser: `webgpu` - requires custom build of `tfjs-backend-webgpu`
*
* Defaults: `humangl` for browser and `tensorflow` for nodejs
*/
2021-09-13 19:28:35 +02:00
backend: '' | 'cpu' | 'wasm' | 'webgl' | 'humangl' | 'tensorflow' | 'webgpu';
2021-09-24 15:55:27 +02:00
/** Path to *.wasm files if backend is set to `wasm`
* - if not set, auto-detects to link to CDN `jsdelivr` when running in browser
*/
2021-03-18 01:16:40 +01:00
wasmPath: string;
2021-04-13 17:05:52 +02:00
/** Print debug statements to console */
2021-03-18 01:16:40 +01:00
debug: boolean;
2021-04-13 17:05:52 +02:00
/** Perform model loading and inference concurrently or sequentially */
2021-03-18 01:16:40 +01:00
async: boolean;
2021-04-13 17:05:52 +02:00
/** What to use for `human.warmup()`
* - warmup pre-initializes all models for faster inference but can take significant time on startup
*/
2021-09-13 19:28:35 +02:00
warmup: 'none' | 'face' | 'full' | 'body';
2021-04-13 17:05:52 +02:00
/** Base model path (typically starting with file://, http:// or https://) for all models
2021-04-25 19:16:04 +02:00
* - individual modelPath values are relative to this path
2021-04-13 17:05:52 +02:00
*/
2021-04-09 14:07:58 +02:00
modelBasePath: string;
/** Cache sensitivity
* - values 0..1 where 0.01 means reset cache if input changed more than 1%
* - set to 0 to disable caching
*/
cacheSensitivity: number;
2021-09-21 04:06:49 +02:00
/** Internal Variable */
2021-10-23 15:38:52 +02:00
skipAllowed: boolean;
2021-04-13 17:05:52 +02:00
/** Run input through image filters before inference
* - image filters run with near-zero latency as they are executed on the GPU
2021-09-24 15:55:27 +02:00
*
* {@link FilterConfig}
2021-04-13 17:05:52 +02:00
*/
2021-09-12 05:54:35 +02:00
filter: Partial<FilterConfig>;
2021-09-24 15:55:27 +02:00
/** {@link GestureConfig} */
2021-09-12 05:54:35 +02:00
gesture: Partial<GestureConfig>;
2021-09-24 15:55:27 +02:00
/** {@link FaceConfig} */
2021-09-12 05:54:35 +02:00
face: Partial<FaceConfig>;
2021-09-24 15:55:27 +02:00
/** {@link BodyConfig} */
2021-09-12 05:54:35 +02:00
body: Partial<BodyConfig>;
2021-09-24 15:55:27 +02:00
/** {@link HandConfig} */
2021-09-12 05:54:35 +02:00
hand: Partial<HandConfig>;
2021-09-24 15:55:27 +02:00
/** {@link ObjectConfig} */
2021-09-12 05:54:35 +02:00
object: Partial<ObjectConfig>;
2021-09-24 15:55:27 +02:00
/** {@link SegmentationConfig} */
2021-09-12 05:54:35 +02:00
segmentation: Partial<SegmentationConfig>;
2021-03-17 23:57:00 +01:00
}
2021-09-12 06:30:11 +02:00
/**
2021-09-12 18:42:17 +02:00
* [See all default Config values...](https://github.com/vladmandic/human/blob/main/src/config.ts#L244)
2021-09-12 06:30:11 +02:00
*
*/
2021-03-17 23:57:00 +01:00
declare const config: Config;
export { config as defaults };
2021-09-13 19:28:35 +02:00
//# sourceMappingURL=config.d.ts.map