update typedoc definitions

pull/280/head
Vladimir Mandic 2021-05-30 18:45:39 -04:00
parent c9446e93cb
commit 5761eb282a
10 changed files with 35 additions and 14 deletions

View File

@@ -11,6 +11,7 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
### **HEAD -> main** 2021/05/30 mandic00@live.com
- implemented service worker
- quantized centernet
- release candidate
- added usage restrictions

View File

@@ -8,6 +8,7 @@
<meta name="application-name" content="Human">
<meta name="description" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
<meta name="msapplication-tooltip" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
<meta name="theme-color" content="#000000">
<link rel="manifest" href="./manifest.webmanifest">
<link rel="shortcut icon" href="../favicon.ico" type="image/x-icon">
<link rel="apple-touch-icon" href="../assets/icon.png">

View File

@@ -1,5 +1,4 @@
/* eslint-disable max-len */
// @ts-nocheck
// based on: https://github.com/munrocket/gl-bench
const UICSS = `

View File

@@ -8,6 +8,7 @@
<meta name="application-name" content="Human">
<meta name="description" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
<meta name="msapplication-tooltip" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
<meta name="theme-color" content="#000000">
<link rel="manifest" href="./manifest.webmanifest">
<link rel="shortcut icon" href="../favicon.ico" type="image/x-icon">
<link rel="apple-touch-icon" href="../assets/icon.png">

View File

@@ -30,6 +30,8 @@ let human;
const userConfig = {
warmup: 'none',
backend: 'webgl',
wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.6.0/dist/',
/*
async: false,
cacheSensitivity: 0,
filter: {
@@ -49,6 +51,7 @@ const userConfig = {
body: { enabled: false, modelPath: 'movenet-lightning.json' },
object: { enabled: false },
gesture: { enabled: true },
*/
};
const drawOptions = {
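For reference, a minimal usage sketch (not part of this commit) of how a demo config like the one above is consumed in the browser; the package import path, the element ids, and the switch to the `wasm` backend (needed for `wasmPath` to take effect) are assumptions made for illustration:

```ts
import { Human } from '@vladmandic/human'; // import path assumed for this sketch

const userConfig = {
  warmup: 'none',
  backend: 'wasm', // switched from 'webgl' so that the wasmPath below actually takes effect
  wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.6.0/dist/',
};

const human = new Human(userConfig);

async function run(): Promise<void> {
  const video = document.getElementById('video') as HTMLVideoElement;    // hypothetical element id
  const canvas = document.getElementById('output') as HTMLCanvasElement; // hypothetical element id
  const result = await human.detect(video);                  // resolves to Result or { error }
  if (!('error' in result)) human.draw.all(canvas, result);  // overlay detection results on the canvas
}
run();
```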

View File

@@ -9,6 +9,7 @@
<meta name="keywords" content="Human">
<meta name="description" content="Human; Author: Vladimir Mandic <mandic00@live.com>">
<meta name="msapplication-tooltip" content="Human; Author: Vladimir Mandic <mandic00@live.com>">
<meta name="theme-color" content="#000000">
<link rel="manifest" href="manifest.webmanifest">
<link rel="shortcut icon" href="/favicon.ico" type="image/x-icon">
<link rel="icon" sizes="256x256" href="../assets/icon.png">

View File

@@ -5,6 +5,7 @@
* Configuration interface definition for **Human** library
*
* Contains all configurable parameters
* @typedef Config
*/
export interface Config {
/** Backend used for TFJS operations */
@@ -194,7 +195,7 @@ const config: Config = {
backend: 'webgl', // select tfjs backend to use, leave empty to use default backend
// can be 'webgl', 'wasm', 'cpu', or 'humangl' which is a custom version of webgl
modelBasePath: '../models/', // base path for all models
wasmPath: '../node_modules/@tensorflow/tfjs-backend-wasm/dist//', // path for wasm binaries, only used for backend: wasm
wasmPath: '../node_modules/@tensorflow/tfjs-backend-wasm/dist/', // path for wasm binaries, only used for backend: wasm
debug: true, // print additional status messages to console
async: true, // execute enabled models in parallel
warmup: 'full', // what to use for human.warmup(), can be 'none', 'face', 'full'
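For reference, a minimal sketch (not part of this commit) of how the defaults documented above are overridden: any object passed to the `Human` constructor is merged over these defaults, so only the values that differ need to be supplied; the package import path is an assumption:

```ts
import { Human } from '@vladmandic/human'; // import path assumed for this sketch

const human = new Human({
  backend: 'wasm',                                              // default: 'webgl'
  wasmPath: 'node_modules/@tensorflow/tfjs-backend-wasm/dist/', // only used when backend is 'wasm'
  modelBasePath: '../models/',                                  // base path for all models
  warmup: 'none',                                               // default: 'full'
  debug: false,                                                 // default: true
});

// the same kind of partial object can also be passed to load() or detect() later
human.load({ async: false }).then(() => console.log('configured models loaded'));
```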

View File

@@ -32,10 +32,14 @@ export type { Config } from './config';
export type { Result, Face, Hand, Body, Item, Gesture } from './result';
export type { DrawOptions } from './draw/draw';
/** Defines all possible input types for **Human** detection */
/** Defines all possible input types for **Human** detection
* @typedef Input
*/
export type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
/** Error message */
/** Error message
* @typedef Error
*/
export type Error = { error: string };
/** Instance of TensorFlow/JS */
@@ -52,6 +56,8 @@ type Model = unknown;
* - Configuration object definition: {@link Config}
* - Results object definition: {@link Result}
* - Possible inputs: {@link Input}
*
* @param userConfig: {@link Config}
*/
export class Human {
/** Current version of Human library in semver format */
@@ -68,9 +74,9 @@ export class Human {
* - Can be polled to determine operations that are currently being executed
*/
state: string;
/** Internal: Instance of current image being processed */
/** @internal: Instance of current image being processed */
image: { tensor: Tensor | null, canvas: OffscreenCanvas | HTMLCanvasElement | null };
/** Internal: Instance of TensorFlow/JS used by Human
/** @internal: Instance of TensorFlow/JS used by Human
* - Can be embedded or externally provided
*/
tf: TensorFlow;
@@ -91,7 +97,7 @@
canvas: typeof draw.canvas,
all: typeof draw.all,
};
/** Internal: Currently loaded models */
/** @internal: Currently loaded models */
models: {
face: [Model, Model, Model] | null,
posenet: Model | null,
@@ -108,7 +114,7 @@
centernet: Model | null,
faceres: Model | null,
};
/** Internal: Currently loaded classes */
/** @internal: Currently loaded classes */
classes: {
facemesh: typeof facemesh;
emotion: typeof emotion;
@@ -137,7 +143,7 @@
/**
* Creates instance of Human library that is further used for all operations
* - @param userConfig: {@link Config}
* @param userConfig: {@link Config}
*/
constructor(userConfig: Config | Record<string, unknown> = {}) {
this.tf = tf;
@@ -215,6 +221,9 @@
/** Similarity method calculates similarity between two provided face descriptors (face embeddings)
* - Calculation is based on normalized Minkowski distance between the two descriptors
* @param embedding1: face descriptor as array of numbers
* @param embedding2: face descriptor as array of numbers
* @returns similarity: number
*/
// eslint-disable-next-line class-methods-use-this
similarity(embedding1: Array<number>, embedding2: Array<number>): number {
@@ -222,7 +231,7 @@
}
/** Enhance method performs additional enhancements to a previously detected face image for further processing
* @param input Tensor as provided in human.result.face[n].tensor
* @param input: Tensor as provided in human.result.face[n].tensor
* @returns Tensor
*/
// eslint-disable-next-line class-methods-use-this
@@ -231,8 +240,7 @@
return faceres.enhance(input);
}
/**
* Match method finds the best match between a provided face descriptor and a predefined database of known descriptors
/** Match method finds the best match between a provided face descriptor and a predefined database of known descriptors
* @param faceEmbedding: face descriptor previously calculated on any face
* @param db: array of mapping of face descriptors to known values
* @param threshold: minimum score for matching to be considered in the result
@@ -245,6 +253,7 @@
/** Load method preloads all configured models on-demand
* - Not explicitly required as any required model is loaded implicitly on its first run
* @param userConfig: {@link Config}
*/
async load(userConfig: Config | Record<string, unknown> = {}) {
this.state = 'load';
@@ -404,6 +413,9 @@
* - Pre-process input: {@link Input}
* - Run inference for all configured models
* - Process and return result: {@link Result}
* @param input: Input
* @param userConfig: Config
* @returns result: Result
*/
async detect(input: Input, userConfig: Config | Record<string, unknown> = {}): Promise<Result | Error> {
// detection happens inside a promise
@@ -654,6 +666,7 @@
/** Warmup method pre-initializes all models for faster inference
* - can take significant time on startup
* - only used for `webgl` and `humangl` backends
* @param userConfig: Config
*/
async warmup(userConfig: Config | Record<string, unknown> = {}): Promise<Result | { error }> {
const t0 = now();
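For reference, a minimal sketch (not part of this commit) exercising the descriptor methods whose typedoc comments are updated above; the package import path, the `embedding` field on the face result, and the database entry shape are assumptions made for illustration:

```ts
import { Human } from '@vladmandic/human'; // import path assumed for this sketch

const human = new Human();

// compare the face found in two images and look the second one up in a small descriptor database
async function compareFaces(imageA: HTMLImageElement, imageB: HTMLImageElement): Promise<void> {
  const a = await human.detect(imageA);
  const b = await human.detect(imageB);
  if ('error' in a || 'error' in b) return;                  // detect() can resolve to { error }
  if (!a.face.length || !b.face.length) return;              // no face found in one of the images
  const descriptorA = a.face[0].embedding as Array<number>;  // face descriptor (field name assumed)
  const descriptorB = b.face[0].embedding as Array<number>;
  // similarity(): score based on normalized Minkowski distance between the two descriptors
  console.log('similarity:', human.similarity(descriptorA, descriptorB));
  // match(): best entry from a database of known descriptors above a minimum score
  const db = [{ name: 'person-a', source: 'imageA', embedding: descriptorA }]; // hypothetical entry shape
  console.log('best match:', human.match(descriptorB, db, 0));
}
```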

View File

@@ -1,5 +1,5 @@
/**
* Type definitions for Human results
* Type definitions for Human result object
*/
import { Tensor } from '../dist/tfjs.esm.js';

View File

@@ -5,6 +5,7 @@ const config = {
modelBasePath: 'http://localhost:10030/models/',
backend: 'wasm',
wasmPath: 'node_modules/@tensorflow/tfjs-backend-wasm/dist/',
// wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.6.0/dist/',
debug: false,
async: false,
filter: {