update wiki pages

master
Vladimir Mandic 2022-11-18 13:14:10 -05:00
parent 6ea5ea911d
commit 4a6f9797e3
16 changed files with 170 additions and 215 deletions

@@ -116,8 +116,8 @@ Cross-Origin-Embedder-Policy: require-corp
Or configure `Human` to load WASM files directly from a CDN:
```js
wasmPath = 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.9.0/dist/';
```
Note that the version of the WASM binaries **must** match the version of TFJS used by the `Human` library
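For instance, a minimal sketch of a matching configuration (the `wasm` backend choice and the `@3.9.0` tag are illustrative; the tag must track the TFJS version actually bundled with `Human`):
```js
// sketch: select the wasm backend and point Human at matching CDN-hosted binaries
const human = new Human({
  backend: 'wasm',
  wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.9.0/dist/',
});
await human.init(); // initialize backend before first detection
```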

@@ -40,6 +40,7 @@ Dev build runs the following operations:
Production build is started by running `npm run build`
<!-- eslint-skip -->
```js
2022-07-18 08:21:08 DATA: Build { name: '@vladmandic/human', version: '2.9.0' }
2022-07-18 08:21:08 INFO: Analyze: { modelsDir: '../human-models/models', modelsOut: 'models/models.json' }

@@ -10,6 +10,7 @@
Overview of `Config` object type:
<!-- eslint-skip -->
```ts
interface Config {
  backend: string // backend engine to be used for processing
@@ -49,9 +50,9 @@ for example,
```js
const myConfig = {
  baseModelPath: 'https://cdn.jsdelivr.net/npm/@vladmandic/human/models/',
  segmentation: { enabled: true },
};
const human = new Human(myConfig);
const result = await human.detect(input);
```

@@ -18,6 +18,7 @@ By default, secure http2 web server will run on port `10031` and unsecure http s
Development environment is started by running `npm run dev`
<!-- eslint-skip -->
```js
2021-09-10 21:03:37 INFO: @vladmandic/human version 2.1.5
2021-09-10 21:03:37 INFO: User: vlado Platform: linux Arch: x64 Node: v16.5.0

Diag.md

@@ -10,26 +10,25 @@ console.log(human.version);
## Enable console debug output
```js
const human = new Human({ debug: true });
```
## Get current configuration
```js
console.log(human.config);
```
```json
{
  "backend": "tensorflow",
  "modelBasePath": "file://models/",
  "wasmPath": "https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.9.0/dist/",
  "debug": true
}
```
## Get current environment details
```js
console.log(human.env);
```
```json
{
@@ -43,7 +42,7 @@ console.log(human.env)
  "wasm": {"supported":true,"simd":true,"multithread":true},
  "webgl": {"supported":true,"version":"WebGL 2.0 (OpenGL ES 3.0 Chromium)","renderer":"WebKit WebGL"},
  "webgpu": {"supported":true,"adapter":"Default"},
  "kernels": []
}
```
@@ -53,7 +52,7 @@ console.log(human.env)
console.log(models);
```
```js
models = [
  { name: 'face', loaded: true },
  { name: 'posenet', loaded: false },
  { name: 'blazepose', loaded: false },
@@ -68,18 +67,16 @@ console.log(human.env)
  { name: 'centernet', loaded: false },
  { name: 'faceres', loaded: true },
  { name: 'segmentation', loaded: false },
];
```
## Get memory usage information
```js
console.log(human.tf.engine().memory());
```
```js
memory = { numTensors: 1053, numDataBuffers: 1053, numBytes: 42736024 };
```
## Get current TensorFlow flags
@@ -87,9 +84,7 @@ console.log(human.env)
console.log(human.tf.ENV.flags);
```
```js
flags = { DEBUG: false, PROD: true, CPU_HANDOFF_SIZE_THRESHOLD: 128 };
```
## Get performance information
@@ -99,10 +94,9 @@ console.log(human.env)
console.log(result.performance);
```
```js
performance = {
  backend: 1, load: 283, image: 1, frames: 1, cached: 0, changed: 1, total: 947, draw: 0, face: 390, emotion: 15, embedding: 97, body: 97, hand: 142, object: 312, gesture: 0,
};
```
## All possible fatal errors

@@ -27,6 +27,7 @@ If `options.drawLabels` is enabled (default)
## Default Label Templates
```js
drawOptions = {
  faceLabels: `face
confidence: [score]%
[gender] [genderScore]%
@@ -43,15 +44,14 @@ If `options.drawLabels` is enabled (default)
  handLabels: '[label] [score]%',
  fingerLabels: '[label]',
  gestureLabels: '[where] [who]: [what]',
};
```
## Example
Example of custom labels:
```js
const drawOptions = {
  bodyLabels: `person confidence is [score]% and has ${human.result?.body?.[0]?.keypoints.length || 'no'} keypoints`,
};
human.draw.all(dom.canvas, human.result, drawOptions);

@@ -81,6 +81,7 @@ Similarity function is based on general *Minkowski distance* between all points
Changing `order` can make similarity matching more or less sensitive (default order is 2nd order)
For example, these will produce slightly different results:
<!-- eslint-skip -->
```js
const similarity2ndOrder = human.match.similarity(firstEmbedding, secondEmbedding, { order: 2 });
const similarity3rdOrder = human.match.similarity(firstEmbedding, secondEmbedding, { order: 3 });
@@ -118,7 +119,7 @@ For example, see `/demo/facematch/facematch.js` and example database `/demo/face
> download db with known faces using http/https
```js
const res = await fetch('/demo/facematch/faces.json');
db = (res && res.ok) ? await res.json() : [];
```
> download db with known faces from a local file > download db with known faces from a local file
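For example, a minimal NodeJS sketch of the local-file equivalent, assuming the same `faces.json` format as above:
```js
// assumption: running under NodeJS with the demo database available on disk
const fs = require('fs');
const db = JSON.parse(fs.readFileSync('demo/facematch/faces.json').toString());
```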

@@ -26,14 +26,14 @@ There are three pre-defined methods:
Example output of `result.gesture`:
```js
gesture = [
  { face: '0', gesture: 'facing camera' },
  { face: '0', gesture: 'head up' },
  { iris: '0', gesture: 'looking center' },
  { body: '0', gesture: 'i give up' },
  { body: '0', gesture: 'leaning left' },
  { hand: '0', gesture: 'thumb forward middlefinger up' },
];
```
The number after each gesture refers to the person that detection belongs to in scenes with multiple people.
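For illustration, a hedged snippet that groups gestures belonging to person `0` using that index:
```js
// hypothetical helper: collect all gestures reported for person '0'
const personZero = gesture.filter((g) => (g.face ?? g.iris ?? g.body ?? g.hand) === '0');
console.log(personZero.map((g) => g.gesture)); // e.g. ['facing camera', 'head up', ...]
```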

Home.md

@@ -25,6 +25,8 @@
- Simple unified API
- Built-in Image, Video and WebCam handling
[*Jump to Quick Start*](#quick-start)
<br>
## Compatibility
@@ -138,76 +140,6 @@
*Suggestions are welcome!*
<hr><br>
## App Examples
Visit [Examples gallery](https://vladmandic.github.io/human/samples/index.html) for more examples
<https://vladmandic.github.io/human/samples/index.html>
![samples](assets/samples.jpg)
<br>
## Options
All options as presented in the demo application...
> [demo/index.html](demo/index.html)
![Options visible in demo](assets/screenshot-menu.png)
<br>
**Results Browser:**
[ *Demo -> Display -> Show Results* ]<br>
![Results](assets/screenshot-results.png)
<br>
## Advanced Examples
1. **Face Similarity Matching:**
Extracts all faces from provided input images,
sorts them by similarity to the selected face,
and optionally matches each detected face against a database of known people to guess their names
> [demo/facematch](demo/facematch/index.html)
![Face Matching](assets/screenshot-facematch.jpg)
2. **Face ID:**
Performs a validation check on a webcam input to detect a real face and matches it against known faces stored in a database
> [demo/faceid](demo/faceid/index.html)
![Face Matching](assets/screenshot-faceid.jpg)
<br>
3. **3D Rendering:**
> [human-motion](https://github.com/vladmandic/human-motion)
![Face3D](https://github.com/vladmandic/human-motion/raw/main/assets/screenshot-face.jpg)
![Body3D](https://github.com/vladmandic/human-motion/raw/main/assets/screenshot-body.jpg)
![Hand3D](https://github.com/vladmandic/human-motion/raw/main/assets/screenshot-hand.jpg)
<br>
4. **VR Model Tracking:**
> [human-three-vrm](https://github.com/vladmandic/human-three-vrm)
> [human-bjs-vrm](https://github.com/vladmandic/human-bjs-vrm)
![ThreeVRM](https://github.com/vladmandic/human-three-vrm/raw/main/assets/human-vrm-screenshot.jpg)
5. **Human as OS native application:**
> [human-electron](https://github.com/vladmandic/human-electron)
<br>
**468-Point Face Mesh Details:**
(view in full resolution to see keypoints)
![FaceMesh](assets/facemesh.png)
<br><hr><br>
## Quick Start
@@ -216,33 +148,16 @@ Simply load `Human` (*IIFE version*) directly from a cloud CDN in your HTML file
(pick one: `jsdelivr`, `unpkg` or `cdnjs`)
```html
<!DOCTYPE HTML>
<script src="https://cdn.jsdelivr.net/npm/@vladmandic/human/dist/human.js"></script>
<script src="https://unpkg.dev/@vladmandic/human/dist/human.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/human/3.0.0/human.js"></script>
```
For details, including how to use `Browser ESM` version or `NodeJS` version of `Human`, see [**Installation**](https://github.com/vladmandic/human/wiki/Install)
<br>
## Inputs
`Human` library can process all known input types:
- `Image`, `ImageData`, `ImageBitmap`, `Canvas`, `OffscreenCanvas`, `Tensor`,
- `HTMLImageElement`, `HTMLCanvasElement`, `HTMLVideoElement`, `HTMLMediaElement`
Additionally, `HTMLVideoElement`, `HTMLMediaElement` can be a standard `<video>` tag that links to:
- WebCam on user's system
- Any supported video type
e.g. `.mp4`, `.avi`, etc.
- Additional video types supported via *HTML5 Media Source Extensions*
e.g.: **HLS** (*HTTP Live Streaming*) using `hls.js` or **DASH** (*Dynamic Adaptive Streaming over HTTP*) using `dash.js`
- **WebRTC** media track using built-in support
<br><hr><br>
## Code Examples
Simple app that uses Human to process video input and
@@ -270,6 +185,7 @@ function detectVideo() {
    human.draw.gesture(outputCanvas, result.gesture);
    // and loop immediately to the next frame
    requestAnimationFrame(detectVideo);
    return result;
  });
}
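Only the tail of the loop appears above; a self-contained sketch of the full loop, assuming `human`, `inputVideo` and `outputCanvas` already exist, might read:
```js
// hedged reconstruction: inputVideo/outputCanvas are assumed names, not the demo's actual ones
async function detectVideo() {
  const result = await human.detect(inputVideo); // process current video frame
  human.draw.all(outputCanvas, result); // draw all detected results onto output canvas
  requestAnimationFrame(detectVideo); // and loop immediately to the next frame
  return result;
}
```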
@@ -379,6 +295,33 @@ And for even better results, you can run detection in a separate web worker thre
<br><hr><br>
## Inputs
`Human` library can process all known input types:
- `Image`, `ImageData`, `ImageBitmap`, `Canvas`, `OffscreenCanvas`, `Tensor`,
- `HTMLImageElement`, `HTMLCanvasElement`, `HTMLVideoElement`, `HTMLMediaElement`
Additionally, `HTMLVideoElement`, `HTMLMediaElement` can be a standard `<video>` tag that links to:
- WebCam on user's system
- Any supported video type
e.g. `.mp4`, `.avi`, etc.
- Additional video types supported via *HTML5 Media Source Extensions*
e.g.: **HLS** (*HTTP Live Streaming*) using `hls.js` or **DASH** (*Dynamic Adaptive Streaming over HTTP*) using `dash.js`
- **WebRTC** media track using built-in support
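To make the list concrete, a browser-side sketch that feeds a playing `<video>` element (webcam or HLS alike; the element id is an assumption) into detection:
```js
// sketch: any playing <video> element can be passed to detect() directly
const video = document.getElementById('video'); // assumed element id
await video.play(); // input must be playing for frame data to be available
const result = await human.detect(video); // runs detection on the current frame
```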
<br><hr><br>
## Detailed Usage
- [**Wiki Home**](https://github.com/vladmandic/human/wiki)
- [**List of all available methods, properties and namespaces**](https://github.com/vladmandic/human/wiki/Usage)
- [**TypeDoc API Specification - Main class**](https://vladmandic.github.io/human/typedoc/classes/Human.html)
- [**TypeDoc API Specification - Full**](https://vladmandic.github.io/human/typedoc/)
<br><hr><br>
## TypeDefs
`Human` is written using TypeScript strong typing and ships with full **TypeDefs** for all classes defined by the library bundled in `types/human.d.ts` and enabled by default
@@ -386,16 +329,13 @@ And for even better results, you can run detection in a separate web worker thre
*Note*: This does not include embedded `tfjs`
If you want to use embedded `tfjs` inside `Human` (`human.tf` namespace) and still get full **typedefs**, add this code:
> import type * as tfjs from '@vladmandic/human/dist/tfjs.esm';
> const tf = human.tf as typeof tfjs;
This is not enabled by default as `Human` does not ship with full **TFJS TypeDefs** due to size considerations
Enabling `tfjs` TypeDefs as above creates additional project (dev-only as only types are required) dependencies as defined in `@vladmandic/human/dist/tfjs.esm.d.ts`:
> @tensorflow/tfjs-core, @tensorflow/tfjs-converter, @tensorflow/tfjs-backend-wasm, @tensorflow/tfjs-backend-webgl
<br><hr><br>
@@ -427,7 +367,7 @@ For more info, see [**Configuration Details**](https://github.com/vladmandic/hum
<br><hr><br>
`Human` library is written in [TypeScript](https://www.typescriptlang.org/docs/handbook/intro.html) **4.9** using [TensorFlow/JS](https://www.tensorflow.org/js/) **4.0** and conforming to latest `JavaScript` [ECMAScript version 2022](https://262.ecma-international.org/) standard
Build target for distributables is `JavaScript` [ECMAScript version 2018](https://262.ecma-international.org/9.0/)
@@ -437,12 +377,3 @@ For details see [**Wiki Pages**](https://github.com/vladmandic/human/wiki)
and [**API Specification**](https://vladmandic.github.io/human/typedoc/classes/Human.html)
<br>
[![](https://img.shields.io/static/v1?label=Sponsor&message=%E2%9D%A4&logo=GitHub&color=%23fe8e86)](https://github.com/sponsors/vladmandic)
![Stars](https://img.shields.io/github/stars/vladmandic/human?style=flat-square&svg=true)
![Forks](https://badgen.net/github/forks/vladmandic/human)
![Code Size](https://img.shields.io/github/languages/code-size/vladmandic/human?style=flat-square&svg=true)
![CDN](https://data.jsdelivr.com/v1/package/npm/@vladmandic/human/badge)<br>
![Downloads](https://img.shields.io/npm/dw/@vladmandic/human.png?style=flat-square)
![Downloads](https://img.shields.io/npm/dm/@vladmandic/human.png?style=flat-square)
![Downloads](https://img.shields.io/npm/dy/@vladmandic/human.png?style=flat-square)

@@ -9,6 +9,7 @@
`Human` allows input to be in many different formats and will perform automatic processing of inputs to the internally required format
<!-- eslint-skip -->
```ts
type Input = Tensor | AnyCanvas | AnyImage | AnyVideo | ImageObjects | ExternalCanvas;
type AnyCanvas = HTMLCanvasElement | OffscreenCanvas;
@@ -69,10 +70,10 @@ Using `node-canvas` to load and decode input files only
```js
const canvas = require('canvas');
const img = await canvas.loadImage(inputFile); // read and decode image file
const myCanvas = canvas.createCanvas(img.width, img.height); // create canvas element
const ctx = myCanvas.getContext('2d');
ctx.drawImage(img, 0, 0, img.width, img.height); // draw loaded image onto canvas
const imageData = ctx.getImageData(0, 0, myCanvas.width, myCanvas.height); // read pixel data from canvas
const tensor = human.tf.tensor(imageData.data); // create tensor from pixel data
const result = await human.detect(tensor); // perform processing
human.tf.dispose(tensor); // dispose input data, required when working with tensors

@@ -17,6 +17,7 @@ Simply load `Human` (*IIFE version*) directly from a cloud CDN in your HTML file
(pick one: `jsdelivr`, `unpkg` or `cdnjs`)
```html
<!DOCTYPE HTML>
<script src="https://cdn.jsdelivr.net/npm/@vladmandic/human/dist/human.js"></script>
<script src="https://unpkg.dev/@vladmandic/human/dist/human.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/human/1.4.1/human.js"></script>
@@ -40,9 +41,16 @@ Defaults:
```json
{
  "node": {
    "require": "./dist/human.node.js",
    "import": "./dist/human.node.js",
    "module": "./dist/human.node.js"
  },
  "require": "./dist/human.node.js",
  "import": "./dist/human.esm.js",
  "script": "./dist/human.js",
  "module": "./dist/human.esm.js",
  "types": "./types/human.d.ts"
}
```
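Read as a sketch, the conditional exports above resolve roughly as follows (the comments describe the mapping and are an interpretation, not executable behavior):
```js
// - NodeJS `require('@vladmandic/human')`        -> ./dist/human.node.js
// - NodeJS `import` (the "node" condition)       -> ./dist/human.node.js
// - browser/bundler `import '@vladmandic/human'` -> ./dist/human.esm.js
// - direct <script> loading (the "script" field) -> ./dist/human.js
import Human from '@vladmandic/human';
const human = new Human();
```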
@@ -55,6 +63,7 @@ Defaults:
Simply download `dist/human.js`, include it in your `HTML` file & it's ready to use.
```html
<!DOCTYPE HTML>
<script src="dist/human.js"></script>
```
@@ -78,6 +87,7 @@ This way you can also use `Human` library within embedded `<script>` tag within
You could use the same syntax within your main `JS` file if it's imported with `<script type="module">`
```html
<!DOCTYPE HTML>
<script src="./index.js" type="module"></script>
```
@@ -99,6 +109,7 @@ Install with:
npm install @vladmandic/human
```
<!-- eslint-skip -->
```js
import Human from '@vladmandic/human'; // points to @vladmandic/human/dist/human.esm.js
// you can also force-load specific version
@@ -115,7 +126,7 @@ Install with:
```
```js
import tf from '@tensorflow/tfjs';
import Human from '@vladmandic/human/dist/human.esm-nobundle.js'; // same functionality as default import, but without tfjs bundled
const human = new Human();
```
@@ -170,7 +181,7 @@ For example:
```js
const config = {
  body: { enabled: true, modelPath: 'file://models.json' },
};
```
<br>

@@ -41,13 +41,15 @@ Example:
- ESBuild configuration:
<!-- eslint-skip -->
```js
{ external: ['fs', 'buffer', 'util', 'os'] }
```
- WebPack configuration:
<!-- eslint-skip -->
```js
externals: {
  'fs': 'commonjs2 fs',
  'buffer': 'commonjs2 buffer',

@@ -16,14 +16,16 @@ Each module is fully self-enclosed:
Define a module that implements `load` and `predict` async methods:
<!-- eslint-skip -->
```ts
export async function load(config: Config | any) { ... }
```
- loads specific model using `modelPath` configuration
- returns `tf.GraphModel`
<!-- eslint-skip -->
```ts
export async function predict(image: Tensor, config: Config, idx: number, count: number) { ... }
```
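A skeleton of such a module might look like the following plain-JS sketch; the `mymodule` config key, input size and tensor ops are placeholders, not the library's actual internals:
```js
// hypothetical custom module implementing the load() and predict() contract
const tf = require('@tensorflow/tfjs');

let model; // cached model instance, loaded once

async function load(config) {
  if (!model) model = await tf.loadGraphModel(config.mymodule.modelPath); // `mymodule` key is an assumption
  return model;
}

async function predict(image, config, idx, count) {
  const resized = tf.image.resizeBilinear(image, [224, 224]); // model-specific preprocessing (assumed size)
  const res = model.execute(resized); // run inference
  tf.dispose(resized); // release intermediate tensor
  return res;
}

module.exports = { load, predict };
```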

@@ -4,6 +4,7 @@ If `config.profile` is enabled, call to `human.profileData()` will return detail
example:
<!-- eslint-skip -->
```js
result = {
  {age: {…}, gender: {…}, emotion: {…}}

@@ -18,6 +18,7 @@ Full documentation:
Overview of `Result` object type:
<!-- eslint-skip -->
```ts
interface Result {
  /** {@link FaceResult}: detection & analysis results */

@@ -12,6 +12,7 @@ All configuration is done in a single JSON object and all model weights are dyna
There is only *ONE* method you need:
<!-- eslint-skip -->
```js
const human = new Human(config?) // create instance of human
const result = await human.detect(input, config?) // run single detection
@@ -19,6 +20,7 @@ There is only *ONE* method you need:
or
<!-- eslint-skip -->
```js
const human = new Human(config?) // create instance of human
await human.video(input, config?) // run detection loop on input video
@@ -47,6 +49,7 @@ Standard workflow:
Methods exported by `Human` library:
<!-- eslint-skip -->
```js
human.detect(input, config?); // run processing on single image/frame
human.video(input, config?); // run processing continuously on video
@@ -59,15 +62,16 @@ Methods used for **face recognition** and **face matching**:
For details, see [embedding documentation](https://github.com/vladmandic/human/wiki/Embedding)
```js
human.match.similarity(descriptor1, descriptor2); // runs similarity calculation between two provided embedding vectors
// vectors for source and target must be previously detected using
// face.description module
human.match.find(descriptor, descriptors); // finds best match for current face in a provided list of faces
```
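For example, a hedged sketch of matching a freshly detected face against a pre-built database, assuming `knownFaces` is an array of `{ name, embedding }` records and that `find` returns an object with `index` and `similarity`:
```js
// assumption: knownFaces = [{ name, embedding }, ...] collected from earlier detections
const result = await human.detect(image);
const descriptor = result.face[0].embedding; // descriptor of the first detected face
const best = human.match.find(descriptor, knownFaces.map((f) => f.embedding));
console.log('best match:', knownFaces[best.index]?.name, best.similarity);
```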
Methods used for **body segmentation**, **background removal** or **background replacement**
For details, see [segmentation documentation](https://vladmandic.github.io/human/typedoc/classes/Human.html#segmentation)
<!-- eslint-skip -->
```js
human.segmentation(input, config?) // runs body segmentation and returns processed image tensor
// which can be foreground-only, alpha-only or blended image
@@ -80,6 +84,7 @@ For details, see:
- [Draw methods documentation](https://github.com/vladmandic/human/wiki/Draw) | [Draw options](https://vladmandic.github.io/human/typedoc/interfaces/DrawOptions.html)
- [WebCam API specs](https://vladmandic.github.io/human/typedoc/classes/WebCam.html)
<!-- eslint-skip -->
```js
human.webcam.* // helper methods to control webcam, main properties are `start`, `stop`, `play`, `pause`
human.draw.* // helper methods to draw detected results to canvas, main options are `options`, `canvas`, `all`
@@ -89,6 +94,7 @@ For details, see:
Methods that are typically called as part of standard workflow and do not need to be called manually
<!-- eslint-skip -->
```js
human.validate(config?); // validate human configuration
human.init(config?); // initialize human and processing backend
@@ -115,18 +121,19 @@ Utility methods that are typically not directly used except in advanced or troub
`Human` library exposes several dynamically generated properties:
<!-- eslint-skip -->
```js
human.version; // string containing version of human library
human.config; // access to current configuration object
// normally set during call to constructor or as parameter to detect()
human.result; // access to last known result object, normally returned via call to detect()
human.performance; // access to current performance counters
human.state; // <string> describing current operation in progress
// progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle'
human.models; // dynamically maintained list of loaded models
human.env; // detected platform environment and capabilities
human.events; // container for events dispatched by human
Human.defaults; // static property of Human class that contains default configuration
```
## TensorFlow
@@ -134,8 +141,9 @@ Human.defaults // static property of Human class that contains default conf
`Human` internally uses `TensorFlow/JS` for all ML processing
Access to namespace of an internal instance of `tfjs` used by `human` is possible via:
<!-- eslint-skip -->
```js
human.tf; // instance of tfjs used by human, can be embedded or externally loaded
```
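Since `human.tf` is a full `tfjs` namespace, standard `tfjs` calls work against it; a tiny sketch:
```js
console.log(human.tf.getBackend()); // active tfjs backend, e.g. 'webgl'
const t = human.tf.zeros([2, 2]); // create a tensor via the embedded namespace
human.tf.dispose(t); // dispose manually-created tensors to avoid leaks
```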
<br>