create typedef rollup

pull/356/head
Vladimir Mandic 2021-11-17 15:45:49 -05:00
parent 8d05c1089e
commit eb65cabf31
20 changed files with 225 additions and 153 deletions

View File

@ -11,7 +11,7 @@
"serve": ["serve"]
},
"clean": {
"locations": ["dist/*", "types/*", "typedoc/*"]
"locations": ["dist/*", "types/lib/*", "typedoc/*"]
},
"lint": {
"locations": [ "*.json", "src/**/*.ts", "test/**/*.js", "demo/**/*.js" ],
@ -139,7 +139,7 @@
"sourcemap": true,
"minify": false,
"external": ["fs", "os", "buffer", "util"],
"typings": "types",
"typings": "types/lib",
"typedoc": "typedoc"
},
{

View File

@ -33,7 +33,8 @@
"dist",
"media",
"models",
"node_modules"
"node_modules",
"types/human.d.ts"
],
"rules": {
"@typescript-eslint/ban-ts-comment": "off",

.gitignore
View File

@ -2,3 +2,4 @@ node_modules
pnpm-lock.yaml
assets/tf*
*.swp
types/lib

View File

@ -4,3 +4,4 @@ samples
typedoc
test
wiki
types/lib

View File

@ -11,6 +11,7 @@
### **HEAD -> main** 2021/11/16 mandic00@live.com
- cache frequent tf constants
- add extra face rotation prior to mesh
- release 2.5.2
- improve error handling

TODO.md
View File

@ -60,3 +60,14 @@ Other:
- Documentation overhaul
- Fix for optional `gear`, `ssrnet`, `mobilefacenet` modules
- Fix for Firefox WebGPU compatibility issue
```
cp tfjs/tfjs.esm.d.ts types/lib/dist/
node_modules/.bin/api-extractor run --local --verbose
cp types/human.d.ts dist/human.esm-nobundle.d.ts
cp types/human.d.ts dist/human.esm.d.ts
cp types/human.d.ts dist/human.d.ts
cp types/human.d.ts dist/human.node-gpu.d.ts
cp types/human.d.ts dist/human.node.d.ts
cp types/human.d.ts dist/human.node-wasm.d.ts
```

api-extractor.json (new file)
View File

@ -0,0 +1,38 @@
{
  "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json",
  "mainEntryPointFilePath": "types/lib/src/human.d.ts",
  "bundledPackages": ["@tensorflow/tfjs-core", "@tensorflow/tfjs-converter", "long", "@types/offscreencanvas"],
  "compiler": {
    "skipLibCheck": false
  },
  "newlineKind": "lf",
  "dtsRollup": {
    "enabled": true,
    "untrimmedFilePath": "types/human.d.ts"
  },
  "docModel": { "enabled": false },
  "tsdocMetadata": {
    "enabled": false
  },
  "apiReport": { "enabled": false },
  "messages": {
    "compilerMessageReporting": {
      "default": {
        "logLevel": "warning"
      }
    },
    "extractorMessageReporting": {
      "default": {
        "logLevel": "warning"
      },
      "ae-missing-release-tag": {
        "logLevel": "none"
      }
    },
    "tsdocMessageReporting": {
      "default": {
        "logLevel": "warning"
      }
    }
  }
}
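
Note: the `dtsRollup` settings above take the per-file declarations that `tsc` emits under `types/lib` (entry point `types/lib/src/human.d.ts`) and bundle them into the single `types/human.d.ts` that the package `types` field points at later in this diff. A minimal consumer-side sketch (hypothetical, assuming the published package name `@vladmandic/human`):

```
// hypothetical consumer: with "types": "types/human.d.ts" in package.json,
// public types resolve from the single rolled-up declaration file
import type { Result, FaceResult } from '@vladmandic/human';

function firstFace(res: Result): FaceResult | undefined {
  return res.face[0];
}
```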

build.js (new file)
View File

@ -0,0 +1,46 @@
const fs = require('fs');
const log = require('@vladmandic/pilogger');
const Build = require('@vladmandic/build').Build;
const APIExtractor = require('@microsoft/api-extractor');

function copy(src, dst) {
  if (!fs.existsSync(src)) return;
  const buffer = fs.readFileSync(src);
  fs.writeFileSync(dst, buffer);
}

const apiIgnoreList = ['ae-forgotten-export', 'ae-unresolved-link'];

async function main() {
  // run production build
  const build = new Build();
  await build.run('production');
  // patch tfjs typedefs
  log.state('Copy:', { input: 'tfjs/tfjs.esm.d.ts' });
  copy('tfjs/tfjs.esm.d.ts', 'types/lib/dist/tfjs.esm.d.ts');
  // run api-extractor to create typedef rollup
  const extractorConfig = APIExtractor.ExtractorConfig.loadFileAndPrepare('api-extractor.json');
  const extractorResult = APIExtractor.Extractor.invoke(extractorConfig, {
    localBuild: true,
    showVerboseMessages: false,
    messageCallback: (msg) => {
      msg.handled = true;
      if (msg.logLevel === 'none' || msg.logLevel === 'verbose' || msg.logLevel === 'info') return;
      if (msg.sourceFilePath?.includes('/node_modules/')) return;
      if (apiIgnoreList.reduce((prev, curr) => prev || msg.messageId.includes(curr), false)) return;
      log.data('API', { level: msg.logLevel, category: msg.category, id: msg.messageId, file: msg.sourceFilePath, line: msg.sourceFileLine, text: msg.text });
    },
  });
  log.state('API-Extractor:', { succeeded: extractorResult.succeeded, errors: extractorResult.errorCount, warnings: extractorResult.warningCount });
  // distribute typedefs
  log.state('Copy:', { input: 'types/human.d.ts' });
  copy('types/human.d.ts', 'dist/human.esm-nobundle.d.ts');
  copy('types/human.d.ts', 'dist/human.esm.d.ts');
  copy('types/human.d.ts', 'dist/human.d.ts');
  copy('types/human.d.ts', 'dist/human.node-gpu.d.ts');
  copy('types/human.d.ts', 'dist/human.node.d.ts');
  copy('types/human.d.ts', 'dist/human.node-wasm.d.ts');
  log.info('Human Build complete...');
}

main();
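
The script above replaces the previous inline copy commands in the npm `build` script (see `package.json` below). As a rough sanity check, a hypothetical post-build step, not part of this commit, could verify that the rollup and its per-bundle copies exist:

```
// hypothetical post-build check (not part of this commit)
import { existsSync } from 'fs';

const expected = ['types/human.d.ts', 'dist/human.d.ts', 'dist/human.esm.d.ts', 'dist/human.node.d.ts'];
for (const file of expected) {
  if (!existsSync(file)) throw new Error(`typedef rollup missing: ${file}`);
}
console.log('typedef rollup present for', expected.length, 'targets');
```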

View File

@ -6,7 +6,7 @@
"main": "dist/human.node.js",
"module": "dist/human.esm.js",
"browser": "dist/human.esm.js",
"types": "types/src/human.d.ts",
"types": "types/human.d.ts",
"author": "Vladimir Mandic <mandic00@live.com>",
"bugs": {
"url": "https://github.com/vladmandic/human/issues"
@ -23,7 +23,7 @@
"scripts": {
"start": "node --no-warnings demo/nodejs/node.js",
"dev": "build --profile development",
"build": "rimraf test/build.log && build --profile production && cp src/tfjs/tfjs.esm.d.ts types/dist/ && cp src/human.d.ts dist/human.esm.d.ts",
"build": "rimraf test/build.log && node build.js",
"test": "node --no-warnings --unhandled-rejections=strict --trace-uncaught test/node.js",
"lint": "eslint src demo test",
"scan": "npx auditjs@latest ossi --dev --quiet"
@ -53,6 +53,7 @@
"tensorflow"
],
"devDependencies": {
"@microsoft/api-extractor": "^7.18.19",
"@tensorflow/tfjs-backend-cpu": "^3.11.0",
"@tensorflow/tfjs-backend-wasm": "^3.11.0",
"@tensorflow/tfjs-backend-webgl": "^3.11.0",
@ -65,14 +66,14 @@
"@tensorflow/tfjs-node": "^3.11.0",
"@tensorflow/tfjs": "^3.11.0",
"@types/node": "^16.11.7",
"@typescript-eslint/eslint-plugin": "^5.3.1",
"@typescript-eslint/parser": "^5.3.1",
"@typescript-eslint/eslint-plugin": "^5.4.0",
"@typescript-eslint/parser": "^5.4.0",
"@vladmandic/build": "^0.6.4",
"@vladmandic/pilogger": "^0.3.5",
"@vladmandic/tfjs": "github:vladmandic/tfjs",
"canvas": "^2.8.0",
"dayjs": "^1.10.7",
"esbuild": "^0.13.13",
"esbuild": "^0.13.14",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-plugin-html": "^6.2.0",
"eslint-plugin-import": "^2.25.3",

View File

@ -3,35 +3,35 @@
/** Generic config type inherited by all module types */
export interface GenericConfig {
/** @property is module enabled? */
/** is module enabled? */
enabled: boolean,
/** @property path to model json file */
/** path to model json file */
modelPath: string,
/** @property how many max frames to go without re-running model if cached results are acceptable */
/** how many max frames to go without re-running model if cached results are acceptable */
skipFrames: number,
/** @property how many max milliseconds to go without re-running model if cached results are acceptable */
/** how many max milliseconds to go without re-running model if cached results are acceptable */
skipTime: number,
}
/** Detector part of face configuration */
export interface FaceDetectorConfig extends GenericConfig {
/** @property is face rotation correction performed after detecting face? */
/** is face rotation correction performed after detecting face? */
rotation: boolean,
/** @property maximum number of detected faces */
/** maximum number of detected faces */
maxDetected: number,
/** @property minimum confidence for a detected face before results are discarded */
/** minimum confidence for a detected face before results are discarded */
minConfidence: number,
/** @property minimum overlap between two detected faces before one is discarded */
/** minimum overlap between two detected faces before one is discarded */
iouThreshold: number,
/** @property factor used to expand detected face before further analysis
/** factor used to expand detected face before further analysis
* - default: 1.6
* - for high-quality inputs can be reduced to increase precision
* - for video inputs or low-quality inputs can be increased to allow for more flexible tracking
*/
cropFactor: number,
/** @property should child models perform on masked image of a face */
/** should child models perform on masked image of a face */
mask: boolean,
/** @property should face detection return face tensor to be used in some other external model? */
/** should face detection return face tensor to be used in some other external model? */
return: boolean,
}
@ -45,13 +45,13 @@ export interface FaceIrisConfig extends GenericConfig {}
* - also used by age and gender detection
*/
export interface FaceDescriptionConfig extends GenericConfig {
/** @property minimum confidence for a detected face before results are discarded */
/** minimum confidence for a detected face before results are discarded */
minConfidence: number,
}
/** Emotion part of face configuration */
export interface FaceEmotionConfig extends GenericConfig {
/** @property minimum confidence for a detected face before results are discarded */
/** minimum confidence for a detected face before results are discarded */
minConfidence: number,
}
@ -74,45 +74,46 @@ export interface FaceConfig extends GenericConfig {
/** Configures all body detection specific options */
export interface BodyConfig extends GenericConfig {
/** @property maximum number of detected bodies */
/** maximum number of detected bodies */
maxDetected: number,
/** @property minimum confidence for a detected body before results are discarded */
/** minimum confidence for a detected body before results are discarded */
minConfidence: number,
/** detector used for body model before actual analysis */
detector?: {
/** @property path to optional body detector model json file */
/** path to optional body detector model json file */
modelPath: string
},
}
/** Configures all hand detection specific options */
export interface HandConfig extends GenericConfig {
/** @property should hand rotation correction be performed after hand detection? */
/** should hand rotation correction be performed after hand detection? */
rotation: boolean,
/** @property minimum confidence for a detected hand before results are discarded */
/** minimum confidence for a detected hand before results are discarded */
minConfidence: number,
/** @property minimum overlap between two detected hands before one is discarded */
/** minimum overlap between two detected hands before one is discarded */
iouThreshold: number,
/** @property maximum number of detected hands */
/** maximum number of detected hands */
maxDetected: number,
/** @property should hand landmarks be detected or just return detected hand box */
/** should hand landmarks be detected or just return detected hand box */
landmarks: boolean,
detector: {
/** @property path to hand detector model json */
/** path to hand detector model json */
modelPath?: string,
},
skeleton: {
/** @property path to hand skeleton model json */
/** path to hand skeleton model json */
modelPath?: string,
},
}
/** Configures all object detection specific options */
export interface ObjectConfig extends GenericConfig {
/** @property minimum confidence for a detected objects before results are discarded */
/** minimum confidence for a detected objects before results are discarded */
minConfidence: number,
/** @property minimum overlap between two detected objects before one is discarded */
/** minimum overlap between two detected objects before one is discarded */
iouThreshold: number,
/** @property maximum number of detected objects */
/** maximum number of detected objects */
maxDetected: number,
}
@ -123,7 +124,7 @@ export interface ObjectConfig extends GenericConfig {
* remove background or replace it with user-provided background
*/
export interface SegmentationConfig extends GenericConfig {
/** @property blur segmentation output by <number> pixels for more realistic image */
/** blur segmentation output by <number> pixels for more realistic image */
blur: number,
}
@ -132,9 +133,9 @@ export interface SegmentationConfig extends GenericConfig {
* - image filters run with near-zero latency as they are executed on the GPU using WebGL
*/
export interface FilterConfig {
/** @property are image filters enabled? */
/** are image filters enabled? */
enabled: boolean,
/** @property perform image histogram equalization
/** perform image histogram equalization
* - equalization is performed on input as a whole and detected face before it's passed for further analysis
*/
equalization: boolean,
@ -142,63 +143,60 @@ export interface FilterConfig {
* - if both width and height are set to 0, there is no resizing
* - if just one is set, second one is scaled automatically
* - if both are set, values are used as-is
* @property
*/
width: number,
/** resize input height
* - if both width and height are set to 0, there is no resizing
* - if just one is set, second one is scaled automatically
* - if both are set, values are used as-is
* @property
*/
height: number,
/** @property return processed canvas imagedata in result */
/** return processed canvas imagedata in result */
return: boolean,
/** @property flip input as mirror image */
/** flip input as mirror image */
flip: boolean,
/** @property range: -1 (darken) to 1 (lighten) */
/** range: -1 (darken) to 1 (lighten) */
brightness: number,
/** @property range: -1 (reduce contrast) to 1 (increase contrast) */
/** range: -1 (reduce contrast) to 1 (increase contrast) */
contrast: number,
/** @property range: 0 (no sharpening) to 1 (maximum sharpening) */
/** range: 0 (no sharpening) to 1 (maximum sharpening) */
sharpness: number,
/** @property range: 0 (no blur) to N (blur radius in pixels) */
/** range: 0 (no blur) to N (blur radius in pixels) */
blur: number
/** @property range: -1 (reduce saturation) to 1 (increase saturation) */
/** range: -1 (reduce saturation) to 1 (increase saturation) */
saturation: number,
/** @property range: 0 (no change) to 360 (hue rotation in degrees) */
/** range: 0 (no change) to 360 (hue rotation in degrees) */
hue: number,
/** @property image negative */
/** image negative */
negative: boolean,
/** @property image sepia colors */
/** image sepia colors */
sepia: boolean,
/** @property image vintage colors */
/** image vintage colors */
vintage: boolean,
/** @property image kodachrome colors */
/** image kodachrome colors */
kodachrome: boolean,
/** @property image technicolor colors */
/** image technicolor colors */
technicolor: boolean,
/** @property image polaroid camera effect */
/** image polaroid camera effect */
polaroid: boolean,
/** @property range: 0 (no pixelate) to N (number of pixels to pixelate) */
/** range: 0 (no pixelate) to N (number of pixels to pixelate) */
pixelate: number,
}
/** Controls gesture detection */
export interface GestureConfig {
/** @property is gesture detection enabled? */
/** is gesture detection enabled? */
enabled: boolean,
}
/** Possible TensorFlow backends */
export type BackendType = ['cpu', 'wasm', 'webgl', 'humangl', 'tensorflow', 'webgpu'];
/** Possible values for `human.warmup` */
export type WarmupType = ['' | 'none' | 'face' | 'full' | 'body'];
/**
* Configuration interface definition for **Human** library
*
* Contains all configurable parameters
* @typedef Config
*
* Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L262)
*/
export interface Config {
@ -258,25 +256,25 @@ export interface Config {
/** Internal Variable */
skipAllowed: boolean;
/** {@link FilterConfig} */
/** Filter config {@link FilterConfig} */
filter: Partial<FilterConfig>,
/** {@link GestureConfig} */
/** Gesture config {@link GestureConfig} */
gesture: Partial<GestureConfig>;
/** {@link FaceConfig} */
/** Face config {@link FaceConfig} */
face: Partial<FaceConfig>,
/** {@link BodyConfig} */
/** Body config {@link BodyConfig} */
body: Partial<BodyConfig>,
/** {@link HandConfig} */
/** Hand config {@link HandConfig} */
hand: Partial<HandConfig>,
/** {@link ObjectConfig} */
/** Object config {@link ObjectConfig} */
object: Partial<ObjectConfig>,
/** {@link SegmentationConfig} */
/** Segmentation config {@link SegmentationConfig} */
segmentation: Partial<SegmentationConfig>,
}
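
Since every section of `Config` is typed as `Partial<...>`, callers only need to supply the fields they want to override. A minimal sketch, assuming the sub-configs accept partial objects and omitted fields fall back to the documented defaults linked above:

```
// minimal override sketch: unspecified fields keep their documented defaults
import type { Config } from '@vladmandic/human';

const overrides: Partial<Config> = {
  filter: { enabled: true, equalization: false },
  face: { enabled: true, detector: { rotation: true, maxDetected: 1 } },
  gesture: { enabled: false },
};
```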

View File

@ -4,7 +4,7 @@ import type { env } from './util/env';
export * from './config';
export * from './result';
export type { Tensor, TensorLike } from './tfjs/types';
export type { Tensor, TensorLike, GraphModel, Rank } from './tfjs/types';
export type { DrawOptions } from './util/draw';
export type { Descriptor } from './face/match';
export type { Box, Point } from './result';

View File

@ -3,12 +3,11 @@ export type Descriptor = Array<number>
export type MatchOptions = { order?: number, threshold?: number, multiplier?: number, min?: number, max?: number } | undefined;
/** Calculates distance between two descriptors
* @param {object} options
* @param {number} options.order algorithm to use
* - Euclidean distance if `order` is 2 (default), Minkowski distance algorithm of nth order if `order` is higher than 2
* @param {number} options.multiplier by how much to enhance difference analysis in range of 1..100
* - default is 20 which normalizes results to similarity above 0.5 can be considered a match
* @returns {number}
* @param options - calculation options
* - order - algorithm to use
* Euclidean distance if `order` is 2 (default), Minkowski distance algorithm of nth order if `order` is higher than 2
* - multiplier - by how much to enhance difference analysis in range of 1..100
* default is 20, which normalizes results so that a similarity above 0.5 can be considered a match
*/
export function distance(descriptor1: Descriptor, descriptor2: Descriptor, options: MatchOptions = { order: 2, multiplier: 25 }) {
// general minkowski distance, euclidean distance is limited case where order is 2
@ -30,15 +29,15 @@ const normalizeDistance = (dist, order, min, max) => {
};
/** Calculates normalized similarity between two face descriptors based on their `distance`
* @param {object} options
* @param {number} options.order algorithm to use
* - Euclidean distance if `order` is 2 (default), Minkowski distance algorithm of nth order if `order` is higher than 2
* @param {number} options.multiplier by how much to enhance difference analysis in range of 1..100
* - default is 20 which normalizes results to similarity above 0.5 can be considered a match
* @param {number} options.min normalize similarity result to a given range
* @param {number} options.max normalzie similarity resutl to a given range
* - default is 0.2...0.8
* @returns {number} similarity between two face descriptors normalized to 0..1 range where 0 is no similarity and 1 is perfect similarity
* @param options - calculation options
* - order - algorithm to use
* Euclidean distance if `order` is 2 (default), Minkowski distance algorithm of nth order if `order` is higher than 2
* - multiplier - by how much to enhance difference analysis in range of 1..100
* default is 20, which normalizes results so that a similarity above 0.5 can be considered a match
* - min - normalize similarity result to a given range
* - max - normalize similarity result to a given range
* default is 0.2...0.8
* Returns similarity between two face descriptors normalized to 0..1 range where 0 is no similarity and 1 is perfect similarity
*/
export function similarity(descriptor1: Descriptor, descriptor2: Descriptor, options: MatchOptions = { order: 2, multiplier: 25, min: 0.2, max: 0.8 }) {
const dist = distance(descriptor1, descriptor2, options);
@ -46,12 +45,10 @@ export function similarity(descriptor1: Descriptor, descriptor2: Descriptor, opt
}
/** Matches given descriptor to a closest entry in array of descriptors
* @param descriptor face descriptor
* @param descriptors array of face descriptors to commpare given descriptor to
* @param {object} options
* @param {number} options.order see {@link similarity}
* @param {number} options.multiplier see {@link similarity}
* @returns {object}
* @param descriptor - face descriptor
* @param descriptors - array of face descriptors to compare given descriptor to
* @param options - see {@link similarity}
* Returns
* - `index` index array index where best match was found or -1 if no matches
* - {@link distance} calculated `distance` of given descriptor to the best match
* - {@link similarity} calculated normalized `similarity` of given descriptor to the best match
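
To illustrate the reworked TSDoc above, a minimal usage sketch of the matching helpers. It assumes descriptors are plain `number[]` arrays (the `Descriptor` type) taken from face detection results, and imports the module directly; the same functions are also exposed on the `Human` instance further down in this diff:

```
// hedged sketch: comparing face descriptors with the match module (src/face/match.ts)
import { distance, similarity, match } from './face/match';

declare const current: number[];    // assumption: a face descriptor from a detection result
declare const database: number[][]; // assumption: previously stored descriptors

const d = distance(current, database[0], { order: 2, multiplier: 25 });
const s = similarity(current, database[0]);  // normalized to 0..1
const best = match(current, database);       // { index, distance, similarity }
console.log({ d, s, best });
```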

View File

@ -5,34 +5,26 @@
import type { GestureResult } from '../result';
import * as fingerPose from '../hand/fingerpose';
/**
* @typedef FaceGesture
*/
/** face gesture type */
export type FaceGesture =
`facing ${'left' | 'center' | 'right'}`
| `blink ${'left' | 'right'} eye`
| `mouth ${number}% open`
| `head ${'up' | 'down'}`;
/**
* @typedef IrisGesture
*/
/** iris gesture type */
export type IrisGesture =
'facing center'
| `looking ${'left' | 'right' | 'up' | 'down'}`
| 'looking center';
/**
* @typedef BodyGesture
*/
/** body gesture type */
export type BodyGesture =
`leaning ${'left' | 'right'}`
| `raise ${'left' | 'right'} hand`
| 'i give up';
/**
* @typedef BodyGesture
*/
/** hand gesture type */
export type HandGesture =
`${'thumb' | 'index' | 'middle' | 'ring' | 'pinky'} forward`
| `${'thumb' | 'index' | 'middle' | 'ring' | 'pinky'} up`
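
The removed `@typedef` blocks above are redundant because these are template-literal union types, which already constrain the allowed strings at compile time. A small illustrative sketch, assuming the gesture types are reachable from the rolled-up typedef:

```
// hedged sketch: template-literal unions reject malformed gesture strings at compile time
import type { FaceGesture, IrisGesture } from '@vladmandic/human';

const g1: FaceGesture = 'facing left';
const g2: FaceGesture = 'mouth 50% open';
const g3: IrisGesture = 'looking up';
// const bad: FaceGesture = 'facing backwards'; // compile-time error
```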

View File

@ -40,12 +40,6 @@ import type { Input, Tensor, DrawOptions, Config, Result, FaceResult, HandResult
// type exports
export * from './exports';
/** Instance of TensorFlow/JS used by Human
* - Can be TFJS that is bundled with `Human` or a manually imported TFJS library
* @external [API](https://js.tensorflow.org/api/latest/)
*/
export type TensorFlow = typeof tf;
/** **Human** library main class
*
* All methods and properties are available only as members of Human class
@ -54,7 +48,7 @@ export type TensorFlow = typeof tf;
* - Results object definition: {@link Result}
* - Possible inputs: {@link Input}
*
* @param userConfig: {@link Config}
* @param userConfig - {@link Config}
* @returns instance of {@link Human}
*/
export class Human {
@ -82,17 +76,17 @@ export class Human {
/** Instance of TensorFlow/JS used by Human
* - Can be embedded or externally provided
* @internal
*
* [TFJS API]<https://js.tensorflow.org/api/latest/>
* [TFJS API]: {@link https://js.tensorflow.org/api/latest/}
*/
tf: TensorFlow;
tf;
/** Object containing environment information used for diagnostics */
env: Env;
/** Draw helper classes that can draw detected objects on canvas using specified draw
* @property options global settings for all draw operations, can be overriden for each draw method {@link DrawOptions}
* - canvas: draws input to canvas
* - options: global settings for all draw operations, can be overridden for each draw method {@link DrawOptions}
* - face, body, hand, gesture, object, person: draws detected results as overlays on canvas
*/
draw: { canvas: typeof draw.canvas, face: typeof draw.face, body: typeof draw.body, hand: typeof draw.hand, gesture: typeof draw.gesture, object: typeof draw.object, person: typeof draw.person, all: typeof draw.all, options: DrawOptions };
@ -103,7 +97,6 @@ export class Human {
models: models.Models;
/** Container for events dispatched by Human
* {@type} EventTarget
* Possible events:
* - `create`: triggered when Human object is instantiated
* - `load`: triggered when models are loaded (explicitly or on-demand)
@ -114,9 +107,9 @@ export class Human {
*/
events: EventTarget | undefined;
/** Reference face triangulation array of 468 points, used for triangle references between points */
faceTriangulation: typeof facemesh.triangulation;
faceTriangulation: number[];
/** Reference UV map of 468 values, used for 3D mapping of the face mesh */
faceUVMap: typeof facemesh.uvmap;
faceUVMap: [number, number][];
/** Performance object that contains values for all recently performed operations */
performance: Record<string, number>; // perf members are dynamically defined as needed
#numTensors: number;
@ -127,9 +120,7 @@ export class Human {
// definition end
/** Constructor for **Human** library that is further used for all operations
*
* @param {Config} userConfig
* @returns {Human}
* @param userConfig - user configuration object {@link Config}
*/
constructor(userConfig?: Partial<Config>) {
this.env = env;
@ -177,8 +168,7 @@ export class Human {
this.emit('create');
}
// helper function: measure tensor leak
/** @hidden */
/** internal function to measure tensor leaks */
analyze = (...msg: string[]) => {
if (!this.#analyzeMemoryLeaks) return;
const currentTensors = this.tf.engine().state.numTensors;
@ -188,8 +178,7 @@ export class Human {
if (leaked !== 0) log(...msg, leaked);
};
// quick sanity check on inputs
/** @hidden */
/** internal function for quick sanity check on inputs @hidden */
#sanity = (input: Input): null | string => {
if (!this.#checkSanity) return null;
if (!input) return 'input is not defined';
@ -214,9 +203,11 @@ export class Human {
return validate(defaults, userConfig || this.config);
}
/** Exports face matching methods */
/** Exports face matching methods {@link match#similarity} */
public similarity = match.similarity;
/** Exports face matching methods {@link match#distance} */
public distance = match.distance;
/** Exports face matching methods {@link match#match} */
public match = match.match;
/** Utility wrapper for performance.now() */
@ -226,9 +217,9 @@ export class Human {
/** Process input and return canvas and tensor
*
* @param input: {@link Input}
* @param {boolean} input.getTensor should image processing also return tensor or just canvas
* @returns { tensor, canvas }
* @param input - any input {@link Input}
* @param getTensor - should image processing also return tensor or just canvas
* Returns object with `tensor` and `canvas`
*/
image(input: Input, getTensor: boolean = true) {
return image.process(input, this.config, getTensor);
@ -236,13 +227,10 @@ export class Human {
/** Segmentation method takes any input and returns processed canvas with body segmentation
* - Segmentation is not triggered as part of detect process
*
* Returns:
*
* @param input: {@link Input}
* @param background?: {@link Input}
* @param input - {@link Input}
* @param background - {@link Input}
* - Optional parameter background is used to fill the background with specific input
* @returns {object}
* Returns:
* - `data` as raw data array with per-pixel segmentation values
* - `canvas` as canvas which is input image filtered with segmentation data and optionally merged with background image. canvas alpha values are set to segmentation values for easy merging
* - `alpha` as grayscale canvas that represents segmentation alpha values
@ -253,7 +241,7 @@ export class Human {
/** Enhance method performs additional enhancements to face image previously detected for further processing
*
* @param input: Tensor as provided in human.result.face[n].tensor
* @param input - Tensor as provided in human.result.face[n].tensor
* @returns Tensor
*/
// eslint-disable-next-line class-methods-use-this
@ -266,7 +254,6 @@ export class Human {
* - when passing manually generated tensors:
* - both input tensors must be in format [1, height, width, 3]
* - if resolution of tensors does not match, second tensor will be resized to match resolution of the first tensor
* @returns {number}
* - return value is pixel similarity score normalized by input resolution and rgb channels
*/
compare(firstImageTensor: Tensor, secondImageTensor: Tensor): Promise<number> {
@ -277,8 +264,6 @@ export class Human {
* - Normally done implicitly during initial load phase
* - Call to explicitly register and initialize TFJS backend without any other operations
* - Use when changing backend during runtime
*
* @returns {void}
*/
async init(): Promise<void> {
await backend.check(this, true);
@ -288,8 +273,7 @@ export class Human {
/** Load method preloads all configured models on-demand
* - Not explicitly required as any required model is loaded implicitly on its first run
*
* @param userConfig?: {@link Config}
* @return Promise<void>
* @param userConfig - {@link Config}
*/
async load(userConfig?: Partial<Config>): Promise<void> {
this.state = 'load';
@ -323,8 +307,7 @@ export class Human {
if (current > (this.performance.loadModels as number || 0)) this.performance.loadModels = this.env.perfadd ? (this.performance.loadModels || 0) + current : current;
}
// emit event
/** @hidden */
/** emit event */
emit = (event: string) => {
if (this.events && this.events.dispatchEvent) this.events?.dispatchEvent(new Event(event));
};
@ -332,8 +315,8 @@ export class Human {
/** Runs interpolation using last known result and returns smoothed result
* Interpolation is based on time since last known result so can be called independently
*
* @param result?: {@link Result} optional use specific result set to run interpolation on
* @returns result: {@link Result}
* @param result - {@link Result} optional use specific result set to run interpolation on
* @returns result - {@link Result}
*/
next(result: Result = this.result): Result {
return interpolate.calc(result, this.config) as Result;
@ -342,8 +325,8 @@ export class Human {
/** Warmup method pre-initializes all configured models for faster inference
* - can take significant time on startup
* - only used for `webgl` and `humangl` backends
* @param userConfig?: {@link Config}
* @returns result: {@link Result}
* @param userConfig - {@link Config}
* @returns result - {@link Result}
*/
async warmup(userConfig?: Partial<Config>) {
const t0 = now();
@ -379,9 +362,9 @@ export class Human {
* - Run inference for all configured models
* - Process and return result: {@link Result}
*
* @param input: {@link Input}
* @param userConfig?: {@link Config}
* @returns result: {@link Result}
* @param input - {@link Input}
* @param userConfig - {@link Config}
* @returns result - {@link Result}
*/
async detect(input: Input, userConfig?: Partial<Config>): Promise<Result> {
// detection happens inside a promise
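
Taken together, the revised class documentation describes a simple lifecycle; a minimal usage sketch of the documented methods, assuming a browser canvas as input:

```
// minimal usage sketch of the Human class lifecycle
import { Human } from '@vladmandic/human';
import type { Config, Result } from '@vladmandic/human';

const config: Partial<Config> = { debug: false };
const human = new Human(config);

async function run(input: HTMLCanvasElement): Promise<Result> {
  await human.init();                       // explicitly register and initialize the TFJS backend
  await human.load();                       // preload configured models
  const result = await human.detect(input); // run inference for all configured models
  return human.next(result);                // time-based interpolation of the last known result
}
```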

View File

@ -62,6 +62,7 @@ export interface FaceResult {
tensor?: Tensor,
}
/** Body Result keypoints */
export interface BodyKeypoint {
/** body part name */
part: string,
@ -136,7 +137,6 @@ export interface ObjectResult {
}
/** Gesture combined results
* @typedef Gesture Type
* Each result has:
* - part: part name and number where gesture was detected: `face`, `iris`, `body`, `hand`
* - gesture: gesture detected
@ -191,6 +191,6 @@ export interface Result {
readonly timestamp: number,
/** getter property that returns unified persons object */
persons: Array<PersonResult>,
/** @property Last known error message */
/** Last known error message */
error: string | null;
}

View File

@ -4,7 +4,7 @@
* TensorFlow Tensor type
* @external
*/
export { Tensor, TensorLike } from '@tensorflow/tfjs-core/dist/index';
export { Tensor, TensorLike, Rank } from '@tensorflow/tfjs-core/dist/index';
/**
* TensorFlow GraphModel type

View File

@ -110,6 +110,7 @@ export class Env {
}
}
/** update backend information */
async updateBackend() {
// analyze backends
this.backends = Object.keys(tf.engine().registryFactory);
@ -143,6 +144,7 @@ export class Env {
}
}
/** update cpu information */
async updateCPU() {
const cpu = { model: '', flags: [] };
if (this.node && this.platform.startsWith('linux')) {

View File

@ -71,7 +71,7 @@
}
async function wait(time) {
const waiting = new Promise((resolve) => setTimeout(() => resolve(), time));
const waiting = new Promise((resolve) => { setTimeout(() => resolve(), time); });
await waiting;
}

View File

@ -13,7 +13,7 @@
"allowUnusedLabels": false,
"alwaysStrict": true,
"declaration": true,
"declarationMap": false,
"declarationMap": true,
"emitDecoratorMetadata": true,
"esModuleInterop": false,
"exactOptionalPropertyTypes": true,

wiki

@ -1 +1 @@
Subproject commit e0e2b9a2ac15a4569abc1e8281e7636de2c45aef
Subproject commit 799273cb94e64d08280479435cc11daedabd5a60