update wiki pages

master
Vladimir Mandic 2022-11-18 13:14:10 -05:00
parent 6ea5ea911d
commit 4a6f9797e3
16 changed files with 170 additions and 215 deletions

@ -116,8 +116,8 @@ Cross-Origin-Embedder-Policy: require-corp
Or configure `Human` load WASM files directly from a CDN: Or configure `Human` load WASM files directly from a CDN:
```json ```js
wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.9.0/dist/' wasmPath = 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.9.0/dist/';
``` ```
Note that version of WASM binaries **must** match version of TFJS used by `Human` library Note that version of WASM binaries **must** match version of TFJS used by `Human` library

@ -40,6 +40,7 @@ Dev build runs following operations:
Production build is started by running `npm run build` Production build is started by running `npm run build`
<!-- eslint-skip -->
```js ```js
2022-07-18 08:21:08 DATA: Build { name: '@vladmandic/human', version: '2.9.0' } 2022-07-18 08:21:08 DATA: Build { name: '@vladmandic/human', version: '2.9.0' }
2022-07-18 08:21:08 INFO: Analyze: { modelsDir: '../human-models/models', modelsOut: 'models/models.json' } 2022-07-18 08:21:08 INFO: Analyze: { modelsDir: '../human-models/models', modelsOut: 'models/models.json' }

@ -10,6 +10,7 @@
Overview of `Config` object type: Overview of `Config` object type:
<!-- eslint-skip -->
```ts ```ts
interface Config { interface Config {
backend: string // backend engine to be used for processing backend: string // backend engine to be used for processing
@ -49,9 +50,9 @@ for example,
```js ```js
const myConfig = { const myConfig = {
baseModelPath: `https://cdn.jsdelivr.net/npm/@vladmandic/human/models/`, baseModelPath: 'https://cdn.jsdelivr.net/npm/@vladmandic/human/models/',
segmentation: { enabled: true }, segmentation: { enabled: true },
} };
const human = new Human(myConfig); const human = new Human(myConfig);
const result = await human.detect(input); const result = await human.detect(input);
``` ```

@ -18,6 +18,7 @@ By default, secure http2 web server will run on port `10031` and unsecure http s
Development environment is started by running `npm run dev` Development environment is started by running `npm run dev`
<!-- eslint-skip -->
```js ```js
2021-09-10 21:03:37 INFO: @vladmandic/human version 2.1.5 2021-09-10 21:03:37 INFO: @vladmandic/human version 2.1.5
2021-09-10 21:03:37 INFO: User: vlado Platform: linux Arch: x64 Node: v16.5.0 2021-09-10 21:03:37 INFO: User: vlado Platform: linux Arch: x64 Node: v16.5.0

50
Diag.md

@ -10,26 +10,25 @@ console.log(human.version);
## Enable console debug output ## Enable console debug output
```js ```js
const human = new Human({ debug: true }) const human = new Human({ debug: true });
``` ```
## Get current configuration ## Get current configuration
```js ```js
console.log(human.config) console.log(human.config);
``` ```
```js ```json
{ {
backend: 'tensorflow', "backend": "tensorflow",
modelBasePath: 'file://models/', "modelBasePath": "file://models/",
wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.9.0/dist/', "wasmPath": "https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.9.0/dist/",
debug: true, "debug": true
...
} }
``` ```
## Get current environment details ## Get current environment details
```js ```js
console.log(human.env) console.log(human.env);
``` ```
```json ```json
{ {
@ -43,17 +42,17 @@ console.log(human.env)
"wasm": {"supported":true,"simd":true,"multithread":true}, "wasm": {"supported":true,"simd":true,"multithread":true},
"webgl": {"supported":true,"version":"WebGL 2.0 (OpenGL ES 3.0 Chromium)","renderer":"WebKit WebGL"}, "webgl": {"supported":true,"version":"WebGL 2.0 (OpenGL ES 3.0 Chromium)","renderer":"WebKit WebGL"},
"webgpu": {"supported":true,"adapter":"Default"}, "webgpu": {"supported":true,"adapter":"Default"},
"kernels": [...] "kernels": []
} }
``` ```
## Get list of all models ## Get list of all models
```js ```js
const models = Object.keys(human.models).map((model) => ({ name: model, loaded: (human.models[model] !== null) })); const models = Object.keys(human.models).map((model) => ({ name: model, loaded: (human.models[model] !== null) }));
console.log(models); console.log(models);
``` ```
```js ```js
[ models = [
{ name: 'face', loaded: true }, { name: 'face', loaded: true },
{ name: 'posenet', loaded: false }, { name: 'posenet', loaded: false },
{ name: 'blazepose', loaded: false }, { name: 'blazepose', loaded: false },
@ -68,41 +67,36 @@ console.log(human.env)
{ name: 'centernet', loaded: false }, { name: 'centernet', loaded: false },
{ name: 'faceres', loaded: true }, { name: 'faceres', loaded: true },
{ name: 'segmentation', loaded: false }, { name: 'segmentation', loaded: false },
] ];
``` ```
## Get memory usage information ## Get memory usage information
```js ```js
console.log(human.tf.engine().memory())); console.log(human.tf.engine().memory());
``` ```
```js ```js
{ memory = { numTensors: 1053, numDataBuffers: 1053, numBytes: 42736024 };
numTensors: 1053, numDataBuffers: 1053, numBytes: 42736024
}
``` ```
## Get current TensorFlow flags ## Get current TensorFlow flags
```js ```js
console.log(human.tf.ENV.flags); console.log(human.tf.ENV.flags);
``` ```
```js ```js
{ flags = { DEBUG: false, PROD: true, CPU_HANDOFF_SIZE_THRESHOLD: 128 };
DEBUG: false, PROD: true, CPU_HANDOFF_SIZE_THRESHOLD: 128
}
``` ```
## Get performance information ## Get performance information
```js ```js
const result = await human.detect(input); const result = await human.detect(input);
console.log(result.performance); console.log(result.performance);
``` ```
```js ```js
{ performance = {
backend: 1, load: 283, image: 1, frames: 1, cached: 0, changed: 1, total: 947, draw: 0, backend: 1, load: 283, image: 1, frames: 1, cached: 0, changed: 1, total: 947, draw: 0, face: 390, emotion: 15, embedding: 97, body: 97, hand: 142, object: 312, gesture: 0,
face: 390, emotion: 15, embedding: 97, body: 97, hand: 142, object: 312, gesture: 0 };
}
``` ```
## All possible fatal errors ## All possible fatal errors

@ -27,6 +27,7 @@ If `options.drawLabels` is enabled (default)
## Default Label Templates ## Default Label Templates
```js ```js
drawOptions = {
faceLabels: `face faceLabels: `face
confidence: [score]% confidence: [score]%
[gender] [genderScore]% [gender] [genderScore]%
@ -43,15 +44,14 @@ If `options.drawLabels` is enabled (default)
handLabels: '[label] [score]%', handLabels: '[label] [score]%',
fingerLabels: '[label]', fingerLabels: '[label]',
gestureLabels: '[where] [who]: [what]', gestureLabels: '[where] [who]: [what]',
};
``` ```
## Example ## Example
Example of custom labels: Example of custom labels:
```js ```js
import * as Human from '@vladmandic/human'; const drawOptions = {
...
const drawOptions: Partial<Human.DrawOptions> = {
bodyLabels: `person confidence is [score]% and has ${human.result?.body?.[0]?.keypoints.length || 'no'} keypoints`, bodyLabels: `person confidence is [score]% and has ${human.result?.body?.[0]?.keypoints.length || 'no'} keypoints`,
}; };
human.draw.all(dom.canvas, human.result, drawOptions); human.draw.all(dom.canvas, human.result, drawOptions);

@ -81,6 +81,7 @@ Similarity function is based on general *Minkowski distance* between all points
Changing `order` can make similarity matching more or less sensitive (default order is 2nd order) Changing `order` can make similarity matching more or less sensitive (default order is 2nd order)
For example, those will produce slightly different results: For example, those will produce slightly different results:
<!-- eslint-skip -->
```js ```js
const similarity2ndOrder = human.match.similarity(firstEmbedding, secondEmbedding, { order = 2 }); const similarity2ndOrder = human.match.similarity(firstEmbedding, secondEmbedding, { order = 2 });
const similarity3rdOrder = human.match.similarity(firstEmbedding, secondEmbedding, { order = 3 }); const similarity3rdOrder = human.match.similarity(firstEmbedding, secondEmbedding, { order = 3 });
@ -96,9 +97,9 @@ to be used at the later time to find the best match for any given face
For example: For example:
```js ```js
const db = []; const db = [];
const res = await human.detect(input); const res = await human.detect(input);
db.push({ label: 'this-is-me', embedding: res.face[0].embedding }); db.push({ label: 'this-is-me', embedding: res.face[0].embedding });
``` ```
Note that you can have multiple entries for the same person and best match will be used Note that you can have multiple entries for the same person and best match will be used
@ -106,10 +107,10 @@ Note that you can have multiple entries for the same person and best match will
To find the best match, simply use `match` method while providing embedding descriptor to compare and pre-prepared list of descriptors To find the best match, simply use `match` method while providing embedding descriptor to compare and pre-prepared list of descriptors
```js ```js
const embeddingArray = db.map((record) => record.embedding); // build array with just embeddings const embeddingArray = db.map((record) => record.embedding); // build array with just embeddings
const best = human.match.find(embedding, embeddingArray); // return is object: { index: number, similarity: number, distance: number } const best = human.match.find(embedding, embeddingArray); // return is object: { index: number, similarity: number, distance: number }
const label = embeddingArray[best.index].label; const label = embeddingArray[best.index].label;
console.log({ name, similarity: best.similarity }); console.log({ name, similarity: best.similarity });
``` ```
Database can be further stored in a JS or JSON file and retrieved when needed to have Database can be further stored in a JS or JSON file and retrieved when needed to have
@ -118,14 +119,14 @@ For example, see `/demo/facematch/facematch.js` and example database `/demo/face
> download db with known faces using http/https > download db with known faces using http/https
```js ```js
let res = await fetch('/demo/facematch/faces.json'); const res = await fetch('/demo/facematch/faces.json');
db = (res && res.ok) ? await res.json() : []; db = (res && res.ok) ? await res.json() : [];
``` ```
> download db with known faces from a local file > download db with known faces from a local file
```js ```js
const fs = require('fs'); const fs = require('fs');
const buffer = fs.readFileSync('/demo/facematch/faces.json'); const buffer = fs.readFileSync('/demo/facematch/faces.json');
db = JSON.parse(buffer); db = JSON.parse(buffer);
``` ```
<br> <br>

@ -26,14 +26,14 @@ There are three pre-defined methods:
Example output of `result.gesture`: Example output of `result.gesture`:
```js ```js
[ gesture = [
{face: "0", gesture: "facing camera"} { face: '0', gesture: 'facing camera' },
{face: "0", gesture: "head up"} { face: '0', gesture: 'head up' },
{iris: "0", gesture: "looking center"} { iris: '0', gesture: 'looking center' },
{body: "0", gesture: "i give up"} { body: '0', gesture: 'i give up' },
{body: "0", gesture: "leaning left"} { body: '0', gesture: 'leaning left' },
{hand: "0", gesture: "thumb forward middlefinger up"} { hand: '0', gesture: 'thumb forward middlefinger up' },
] ];
``` ```
Where number after gesture refers to number of person that detection belongs to in scenes with multiple people. Where number after gesture refers to number of person that detection belongs to in scenes with multiple people.

143
Home.md

@ -25,6 +25,8 @@
- Simple unified API - Simple unified API
- Built-in Image, Video and WebCam handling - Built-in Image, Video and WebCam handling
[*Jump to Quick Start*](#quick-start)
<br> <br>
## Compatibility ## Compatibility
@ -138,76 +140,6 @@
*Suggestions are welcome!* *Suggestions are welcome!*
<hr><br>
## App Examples
Visit [Examples gallery](https://vladmandic.github.io/human/samples/index.html) for more examples
<https://vladmandic.github.io/human/samples/index.html>
![samples](assets/samples.jpg)
<br>
## Options
All options as presented in the demo application...
> [demo/index.html](demo/index.html)
![Options visible in demo](assets/screenshot-menu.png)
<br>
**Results Browser:**
[ *Demo -> Display -> Show Results* ]<br>
![Results](assets/screenshot-results.png)
<br>
## Advanced Examples
1. **Face Similarity Matching:**
Extracts all faces from provided input images,
sorts them by similarity to selected face
and optionally matches detected face with database of known people to guess their names
> [demo/facematch](demo/facematch/index.html)
![Face Matching](assets/screenshot-facematch.jpg)
2. **Face ID:**
Performs validation check on a webcam input to detect a real face and matches it to known faces stored in database
> [demo/faceid](demo/faceid/index.html)
![Face Matching](assets/screenshot-faceid.jpg)
<br>
3. **3D Rendering:**
> [human-motion](https://github.com/vladmandic/human-motion)
![Face3D](https://github.com/vladmandic/human-motion/raw/main/assets/screenshot-face.jpg)
![Body3D](https://github.com/vladmandic/human-motion/raw/main/assets/screenshot-body.jpg)
![Hand3D](https://github.com/vladmandic/human-motion/raw/main/assets/screenshot-hand.jpg)
<br>
4. **VR Model Tracking:**
> [human-three-vrm](https://github.com/vladmandic/human-three-vrm)
> [human-bjs-vrm](https://github.com/vladmandic/human-bjs-vrm)
![ThreeVRM](https://github.com/vladmandic/human-three-vrm/raw/main/assets/human-vrm-screenshot.jpg)
5. **Human as OS native application:**
> [human-electron](https://github.com/vladmandic/human-electron)
<br>
**468-Point Face Mesh Details:**
(view in full resolution to see keypoints)
![FaceMesh](assets/facemesh.png)
<br><hr><br> <br><hr><br>
## Quick Start ## Quick Start
@ -216,33 +148,16 @@ Simply load `Human` (*IIFE version*) directly from a cloud CDN in your HTML file
(pick one: `jsdelivr`, `unpkg` or `cdnjs`) (pick one: `jsdelivr`, `unpkg` or `cdnjs`)
```html ```html
<!DOCTYPE HTML>
<script src="https://cdn.jsdelivr.net/npm/@vladmandic/human/dist/human.js"></script> <script src="https://cdn.jsdelivr.net/npm/@vladmandic/human/dist/human.js"></script>
<script src="https://unpkg.dev/@vladmandic/human/dist/human.js"></script> <script src="https://unpkg.dev/@vladmandic/human/dist/human.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/human/2.1.5/human.js"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/human/3.0.0/human.js"></script>
``` ```
For details, including how to use `Browser ESM` version or `NodeJS` version of `Human`, see [**Installation**](https://github.com/vladmandic/human/wiki/Install) For details, including how to use `Browser ESM` version or `NodeJS` version of `Human`, see [**Installation**](https://github.com/vladmandic/human/wiki/Install)
<br> <br>
## Inputs
`Human` library can process all known input types:
- `Image`, `ImageData`, `ImageBitmap`, `Canvas`, `OffscreenCanvas`, `Tensor`,
- `HTMLImageElement`, `HTMLCanvasElement`, `HTMLVideoElement`, `HTMLMediaElement`
Additionally, `HTMLVideoElement`, `HTMLMediaElement` can be a standard `<video>` tag that links to:
- WebCam on user's system
- Any supported video type
e.g. `.mp4`, `.avi`, etc.
- Additional video types supported via *HTML5 Media Source Extensions*
e.g.: **HLS** (*HTTP Live Streaming*) using `hls.js` or **DASH** (*Dynamic Adaptive Streaming over HTTP*) using `dash.js`
- **WebRTC** media track using built-in support
<br><hr><br>
## Code Examples ## Code Examples
Simple app that uses Human to process video input and Simple app that uses Human to process video input and
@ -270,6 +185,7 @@ function detectVideo() {
human.draw.gesture(outputCanvas, result.gesture); human.draw.gesture(outputCanvas, result.gesture);
// and loop immediate to the next frame // and loop immediate to the next frame
requestAnimationFrame(detectVideo); requestAnimationFrame(detectVideo);
return result;
}); });
} }
@ -309,7 +225,7 @@ human.events.addEventListener('detect', () => { // event gets triggered when det
function detectVideo() { function detectVideo() {
human.detect(inputVideo) // run detection human.detect(inputVideo) // run detection
.then(() => requestAnimationFrame(detectVideo)); // upon detect complete start processing of the next frame .then(() => requestAnimationFrame(detectVideo)); // upon detect complete start processing of the next frame
} }
detectVideo(); // start loop detectVideo(); // start loop
@ -379,6 +295,33 @@ And for even better results, you can run detection in a separate web worker thre
<br><hr><br> <br><hr><br>
## Inputs
`Human` library can process all known input types:
- `Image`, `ImageData`, `ImageBitmap`, `Canvas`, `OffscreenCanvas`, `Tensor`,
- `HTMLImageElement`, `HTMLCanvasElement`, `HTMLVideoElement`, `HTMLMediaElement`
Additionally, `HTMLVideoElement`, `HTMLMediaElement` can be a standard `<video>` tag that links to:
- WebCam on user's system
- Any supported video type
e.g. `.mp4`, `.avi`, etc.
- Additional video types supported via *HTML5 Media Source Extensions*
e.g.: **HLS** (*HTTP Live Streaming*) using `hls.js` or **DASH** (*Dynamic Adaptive Streaming over HTTP*) using `dash.js`
- **WebRTC** media track using built-in support
<br><hr><br>
## Detailed Usage
- [**Wiki Home**](https://github.com/vladmandic/human/wiki)
- [**List of all available methods, properties and namespaces**](https://github.com/vladmandic/human/wiki/Usage)
- [**TypeDoc API Specification - Main class**](https://vladmandic.github.io/human/typedoc/classes/Human.html)
- [**TypeDoc API Specification - Full**](https://vladmandic.github.io/human/typedoc/)
<br><hr><br>
## TypeDefs ## TypeDefs
`Human` is written using TypeScript strong typing and ships with full **TypeDefs** for all classes defined by the library bundled in `types/human.d.ts` and enabled by default `Human` is written using TypeScript strong typing and ships with full **TypeDefs** for all classes defined by the library bundled in `types/human.d.ts` and enabled by default
@ -386,16 +329,13 @@ And for even better results, you can run detection in a separate web worker thre
*Note*: This does not include embedded `tfjs` *Note*: This does not include embedded `tfjs`
If you want to use embedded `tfjs` inside `Human` (`human.tf` namespace) and still full **typedefs**, add this code: If you want to use embedded `tfjs` inside `Human` (`human.tf` namespace) and still full **typedefs**, add this code:
```js > import type * as tfjs from '@vladmandic/human/dist/tfjs.esm';
import type * as tfjs from '@vladmandic/human/dist/tfjs.esm'; > const tf = human.tf as typeof tfjs;
...
const tf = human.tf as typeof tfjs;
```
This is not enabled by default as `Human` does not ship with full **TFJS TypeDefs** due to size considerations This is not enabled by default as `Human` does not ship with full **TFJS TypeDefs** due to size considerations
Enabling `tfjs` TypeDefs as above creates additional project (dev-only as only types are required) dependencies as defined in `@vladmandic/human/dist/tfjs.esm.d.ts`: Enabling `tfjs` TypeDefs as above creates additional project (dev-only as only types are required) dependencies as defined in `@vladmandic/human/dist/tfjs.esm.d.ts`:
@tensorflow/tfjs-core, @tensorflow/tfjs-converter, @tensorflow/tfjs-backend-wasm, @tensorflow/tfjs-backend-webgl > @tensorflow/tfjs-core, @tensorflow/tfjs-converter, @tensorflow/tfjs-backend-wasm, @tensorflow/tfjs-backend-webgl
<br><hr><br> <br><hr><br>
@ -427,7 +367,7 @@ For more info, see [**Configuration Details**](https://github.com/vladmandic/hum
<br><hr><br> <br><hr><br>
`Human` library is written in [TypeScript](https://www.typescriptlang.org/docs/handbook/intro.html) **4.8** using [TensorFlow/JS](https://www.tensorflow.org/js/) **4.0** and conforming to latest `JavaScript` [ECMAScript version 2022](https://262.ecma-international.org/) standard `Human` library is written in [TypeScript](https://www.typescriptlang.org/docs/handbook/intro.html) **4.9** using [TensorFlow/JS](https://www.tensorflow.org/js/) **4.0** and conforming to latest `JavaScript` [ECMAScript version 2022](https://262.ecma-international.org/) standard
Build target for distributables is `JavaScript` [EMCAScript version 2018](https://262.ecma-international.org/9.0/) Build target for distributables is `JavaScript` [EMCAScript version 2018](https://262.ecma-international.org/9.0/)
@ -437,12 +377,3 @@ For details see [**Wiki Pages**](https://github.com/vladmandic/human/wiki)
and [**API Specification**](https://vladmandic.github.io/human/typedoc/classes/Human.html) and [**API Specification**](https://vladmandic.github.io/human/typedoc/classes/Human.html)
<br> <br>
[![](https://img.shields.io/static/v1?label=Sponsor&message=%E2%9D%A4&logo=GitHub&color=%23fe8e86)](https://github.com/sponsors/vladmandic)
![Stars](https://img.shields.io/github/stars/vladmandic/human?style=flat-square&svg=true)
![Forks](https://badgen.net/github/forks/vladmandic/human)
![Code Size](https://img.shields.io/github/languages/code-size/vladmandic/human?style=flat-square&svg=true)
![CDN](https://data.jsdelivr.com/v1/package/npm/@vladmandic/human/badge)<br>
![Downloads](https://img.shields.io/npm/dw/@vladmandic/human.png?style=flat-square)
![Downloads](https://img.shields.io/npm/dm/@vladmandic/human.png?style=flat-square)
![Downloads](https://img.shields.io/npm/dy/@vladmandic/human.png?style=flat-square)

@ -9,6 +9,7 @@
`Human` allows input to be in many different formats and will perform automatic processing of inputs to internally required format `Human` allows input to be in many different formats and will perform automatic processing of inputs to internally required format
<!-- eslint-skip -->
```ts ```ts
type Input = Tensor | AnyCanvas | AnyImage | AnyVideo | ImageObjects | ExternalCanvas; type Input = Tensor | AnyCanvas | AnyImage | AnyVideo | ImageObjects | ExternalCanvas;
type AnyCanvas = HTMLCanvasElement | OffscreenCanvas; type AnyCanvas = HTMLCanvasElement | OffscreenCanvas;
@ -69,10 +70,10 @@ Using `node-canvas` to load and decode input files only
```js ```js
const canvas = require('canvas'); const canvas = require('canvas');
const img = await canvas.loadImage(inputFile); // read and decode image file const img = await canvas.loadImage(inputFile); // read and decode image file
const canvas = canvas.createCanvas(img.width, img.height); // create canvas element const myCanvas = canvas.createCanvas(img.width, img.height); // create canvas element
const ctx = canvas.getContext('2d'); const ctx = myCanvas.getContext('2d');
ctx.drawImage(img, 0, 0, img.width, img.height); // draw loaded image onto canvas ctx.drawImage(img, 0, 0, img.width, img.height); // draw loaded image onto canvas
const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height); // read pixel data from canvas const imageData = ctx.getImageData(0, 0, myCanvas.width, myCanvas.height); // read pixel data from canvas
const tensor = human.tf.tensor(imageData.data); // create tensor from pixel data const tensor = human.tf.tensor(imageData.data); // create tensor from pixel data
const result = await human.detect(tensor); // perform processing const result = await human.detect(tensor); // perform processing
human.tf.dispose(tensor); // dispose input data, required when working with tensors human.tf.dispose(tensor); // dispose input data, required when working with tensors

@ -17,6 +17,7 @@ Simply load `Human` (*IIFE version*) directly from a cloud CDN in your HTML file
(pick one: `jsdelivr`, `unpkg` or `cdnjs`) (pick one: `jsdelivr`, `unpkg` or `cdnjs`)
```html ```html
<!DOCTYPE HTML>
<script src="https://cdn.jsdelivr.net/npm/@vladmandic/human/dist/human.js"></script> <script src="https://cdn.jsdelivr.net/npm/@vladmandic/human/dist/human.js"></script>
<script src="https://unpkg.dev/@vladmandic/human/dist/human.js"></script> <script src="https://unpkg.dev/@vladmandic/human/dist/human.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/human/1.4.1/human.js"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/human/1.4.1/human.js"></script>
@ -39,11 +40,18 @@ All versions include `sourcemap` *(.map)*
Defaults: Defaults:
```json ```json
{ {
"main": "dist/human.node.js", "node": {
"module": "dist/human.esm.js", "require": "./dist/human.node.js",
"browser": "dist/human.esm.js", "import": "./dist/human.node.js",
} "module": "./dist/human.node.js"
},
"require": "./dist/human.node.js",
"import": "./dist/human.esm.js",
"script": "./dist/human.js",
"module": "./dist/human.esm.js",
"types": "./types/human.d.ts"
}
``` ```
<br> <br>
@ -55,14 +63,15 @@ Defaults:
Simply download `dist/human.js`, include it in your `HTML` file & it's ready to use. Simply download `dist/human.js`, include it in your `HTML` file & it's ready to use.
```html ```html
<script src="dist/human.js"><script> <!DOCTYPE HTML>
<script src="dist/human.js"><script>
``` ```
IIFE script auto-registers global namespace `Human` within global `Window` object IIFE script auto-registers global namespace `Human` within global `Window` object
Which you can use to create instance of `human` library: Which you can use to create instance of `human` library:
```js ```js
const human = new Human(); const human = new Human();
``` ```
This way you can also use `Human` library within embedded `<script>` tag within your `html` page for all-in-one approach This way you can also use `Human` library within embedded `<script>` tag within your `html` page for all-in-one approach
@ -78,14 +87,15 @@ This way you can also use `Human` library within embbedded `<script>` tag within
You could use same syntax within your main `JS` file if it's imported with `<script type="module">` You could use same syntax within your main `JS` file if it's imported with `<script type="module">`
```html ```html
<script src="./index.js" type="module"> <!DOCTYPE HTML>
<script src="./index.js" type="module">
``` ```
and then in your `index.js` and then in your `index.js`
```js ```js
import Human from 'dist/human.esm.js'; // for direct import must use path to module, not package name import Human from 'dist/human.esm.js'; // for direct import must use path to module, not package name
const human = new Human(); const human = new Human();
``` ```
### **2.2 With Bundler** ### **2.2 With Bundler**
@ -99,11 +109,12 @@ Install with:
npm install @vladmandic/human npm install @vladmandic/human
``` ```
<!-- eslint-skip -->
```js ```js
import Human from '@vladmandic/human'; // points to @vladmandic/human/dist/human.esm.js import Human from '@vladmandic/human'; // points to @vladmandic/human/dist/human.esm.js
// you can also force-load specific version // you can also force-load specific version
// for example: `@vladmandic/human/dist/human.esm-nobundle.js` // for example: `@vladmandic/human/dist/human.esm-nobundle.js`
const human = new Human(); const human = new Human();
``` ```
Or if you prefer to package your version of `tfjs`, you can use `nobundle` version Or if you prefer to package your version of `tfjs`, you can use `nobundle` version
@ -115,9 +126,9 @@ Install with:
``` ```
```js ```js
import tf from '@tensorflow/tfjs' import tf from '@tensorflow/tfjs';
import Human from '@vladmandic/human/dist/human.esm-nobundle.js'; // same functionality as default import, but without tfjs bundled import Human from '@vladmandic/human/dist/human.esm-nobundle.js'; // same functionality as default import, but without tfjs bundled
const human = new Human(); const human = new Human();
``` ```
*Note: When using a named import in a TypeScript project, it is advisable to instruct TypeScript where to look for type definitions using explicit path to types* *Note: When using a named import in a TypeScript project, it is advisable to instruct TypeScript where to look for type definitions using explicit path to types*
@ -144,9 +155,9 @@ Install with:
And then use with: And then use with:
```js ```js
const tf = require('@tensorflow/tfjs-node'); // can also use '@tensorflow/tfjs-node-gpu' if you have environment with CUDA extensions const tf = require('@tensorflow/tfjs-node'); // can also use '@tensorflow/tfjs-node-gpu' if you have environment with CUDA extensions
const Human = require('@vladmandic/human').default; // points to @vladmandic/human/dist/human.node.js const Human = require('@vladmandic/human').default; // points to @vladmandic/human/dist/human.node.js
const human = new Human(); const human = new Human();
``` ```
Or for CUDA accelerated NodeJS backend: Or for CUDA accelerated NodeJS backend:
@ -158,9 +169,9 @@ Or for CUDA accelerated NodeJS backend:
And then use with: And then use with:
```js ```js
const tf = require('@tensorflow/tfjs-node-gpu'); // can also use '@tensorflow/tfjs-node-gpu' if you have environment with CUDA extensions const tf = require('@tensorflow/tfjs-node-gpu'); // can also use '@tensorflow/tfjs-node-gpu' if you have environment with CUDA extensions
const Human = require('@vladmandic/human/dist/human.node-gpu.js').default; // points to @vladmandic/human/dist/human.node.js const Human = require('@vladmandic/human/dist/human.node-gpu.js').default; // points to @vladmandic/human/dist/human.node.js
const human = new Human(); const human = new Human();
``` ```
Since NodeJS projects load `weights` from local filesystem instead of using `http` calls, you must modify default configuration to include correct paths with `file://` prefix Since NodeJS projects load `weights` from local filesystem instead of using `http` calls, you must modify default configuration to include correct paths with `file://` prefix
@ -170,7 +181,7 @@ For example:
```js ```js
const config = { const config = {
body: { enabled: true, modelPath: 'file://models.json' }, body: { enabled: true, modelPath: 'file://models.json' },
} };
``` ```
<br> <br>

@ -41,13 +41,15 @@ Example:
- ESBuild configuration: - ESBuild configuration:
```json <!-- eslint-skip -->
```js
{ external: ['fs', 'buffer', 'util', 'os'] } { external: ['fs', 'buffer', 'util', 'os'] }
``` ```
- WebPack configuration: - WebPack configuration:
```json <!-- eslint-skip -->
```js
externals: { externals: {
'fs': 'commonjs2 fs', 'fs': 'commonjs2 fs',
'buffer': 'commonjs2 buffer', 'buffer': 'commonjs2 buffer',

@ -16,14 +16,16 @@ Each module is fully self enclosed:
Define module that implements `load` and `predict` async methods: Define module that implements `load` and `predict` async methods:
```js <!-- eslint-skip -->
```ts
export async function load(config: Config | any) { ... } export async function load(config: Config | any) { ... }
``` ```
- loads specific model using `modelPath` configuration - loads specific model using `modelPath` configuration
- returns `tf.GraphModel` - returns `tf.GraphModel`
```js <!-- eslint-skip -->
```ts
export async function predict(image: Tensor, config: Config, idx: number, count: number) { ... } export async function predict(image: Tensor, config: Config, idx: number, count: number) { ... }
``` ```

@ -4,6 +4,7 @@ If `config.profile` is enabled, call to `human.profileData()` will return detail
example: example:
<!-- eslint-skip -->
```js ```js
result = { result = {
{age: {…}, gender: {…}, emotion: {…}} {age: {…}, gender: {…}, emotion: {…}}

@ -18,6 +18,7 @@ Full documentation:
Overview of `Result` object type: Overview of `Result` object type:
<!-- eslint-skip -->
```ts ```ts
interface Result { interface Result {
/** {@link FaceResult}: detection & analysis results */ /** {@link FaceResult}: detection & analysis results */

@ -12,6 +12,7 @@ All configuration is done in a single JSON object and all model weights are dyna
There is only *ONE* method you need: There is only *ONE* method you need:
<!-- eslint-skip -->
```js ```js
const human = new Human(config?) // create instance of human const human = new Human(config?) // create instance of human
const result = await human.detect(input, config?) // run single detection const result = await human.detect(input, config?) // run single detection
@ -19,6 +20,7 @@ There is only *ONE* method you need:
or or
<!-- eslint-skip -->
```js ```js
const human = new Human(config?) // create instance of human const human = new Human(config?) // create instance of human
await human.video(input, config?) // run detection loop on input video await human.video(input, config?) // run detection loop on input video
@ -47,6 +49,7 @@ Standard workflow:
Methods exported by `Human` library: Methods exported by `Human` library:
<!-- eslint-skip -->
```js ```js
human.detect(input, config?); // run processing on single image/frame human.detect(input, config?); // run processing on single image/frame
human.video(input, config?); // run processing continuously on video human.video(input, config?); // run processing continuously on video
@ -59,15 +62,16 @@ Methods used for **face recognition** and **face matching**:
For details, see [embedding documentation](https://github.com/vladmandic/human/wiki/Embedding) For details, see [embedding documentation](https://github.com/vladmandic/human/wiki/Embedding)
```js ```js
human.match.similarity(descriptor1, descriptor2) // runs similarity calculation between two provided embedding vectors human.match.similarity(descriptor1, descriptor2); // runs similarity calculation between two provided embedding vectors
// vectors for source and target must be previously detected using // vectors for source and target must be previously detected using
// face.description module // face.description module
human.match.find(descriptor, descriptors) // finds best match for current face in a provided list of faces human.match.find(descriptor, descriptors); // finds best match for current face in a provided list of faces
``` ```
Methods used for **body segmentation**, **background removal** or **background replacement** Methods used for **body segmentation**, **background removal** or **background replacement**
For details, see [segmentation documentation](https://vladmandic.github.io/human/typedoc/classes/Human.html#segmentation) For details, see [segmentation documentation](https://vladmandic.github.io/human/typedoc/classes/Human.html#segmentation)
<!-- eslint-skip -->
```js ```js
human.segmentation(input, config?) // runs body segmentation and returns processed image tensor human.segmentation(input, config?) // runs body segmentation and returns processed image tensor
// which can be foreground-only, alpha-only or blended image // which can be foreground-only, alpha-only or blended image
@ -80,6 +84,7 @@ For details, see:
- [Draw methods documentation](https://github.com/vladmandic/human/wiki/Draw) | [Draw options](https://vladmandic.github.io/human/typedoc/interfaces/DrawOptions.html) - [Draw methods documentation](https://github.com/vladmandic/human/wiki/Draw) | [Draw options](https://vladmandic.github.io/human/typedoc/interfaces/DrawOptions.html)
- [WebCam API specs](https://vladmandic.github.io/human/typedoc/classes/WebCam.html) - [WebCam API specs](https://vladmandic.github.io/human/typedoc/classes/WebCam.html)
<!-- eslint-skip -->
```js ```js
human.webcam.* // helper methods to control webcam, main properties are `start`, `stop`, `play`, `pause` human.webcam.* // helper methods to control webcam, main properties are `start`, `stop`, `play`, `pause`
human.draw.* // helper methods to draw detected results to canvas, main options are `options`, `canvas`, `all` human.draw.* // helper methods to draw detected results to canvas, main options are `options`, `canvas`, `all`
@ -89,6 +94,7 @@ For details, see:
Methods that are typically called as part of standard workflow and do not need to be called manually Methods that are typically called as part of standard workflow and do not need to be called manually
<!-- eslint-skip -->
```js ```js
human.validate(config?); // validate human configuration human.validate(config?); // validate human configuration
human.init(config?); // initialize human and processing backend human.init(config?); // initialize human and processing backend
@ -102,31 +108,32 @@ Methods that are typically called as part of standard workflow and do not need t
Utility methods that are typically not directly used except in advanced or troubleshooting cases Utility methods that are typically not directly used except in advanced or troubleshooting cases
```js ```js
human.analyze(); // check for memory leaks human.analyze(); // check for memory leaks
human.compare(); // compare two images for pixel similarity human.compare(); // compare two images for pixel similarity
human.now(); // utility wrapper for performance timestamp human.now(); // utility wrapper for performance timestamp
human.profile(); // run function via profiler human.profile(); // run function via profiler
human.reset(); // reset configuration human.reset(); // reset configuration
human.sleep(); // utility wrapper for sleep function human.sleep(); // utility wrapper for sleep function
human.emit(); // internal event emitter human.emit(); // internal event emitter
``` ```
## Human Properties ## Human Properties
`Human` library exposes several dynamically generated properties: `Human` library exposes several dynamically generated properties:
<!-- eslint-skip -->
```js ```js
human.version // string containing version of human library human.version; // string containing version of human library
human.config // access to current configuration object human.config; // access to current configuration object
// normally set during call to constructor or as parameter to detect() // normally set during call to constructor or as parameter to detect()
human.result // access to last known result object, normally returned via call to detect() human.result; // access to last known result object, normally returned via call to detect()
human.performance // access to current performance counters human.performance; // access to current performance counters
human.state // <string> describing current operation in progress human.state; // <string> describing current operation in progress
// progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle' // progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle'
human.models // dynamically maintained list of loaded models human.models; // dynamically maintained list of loaded models
human.env // detected platform environment and capabilities human.env; // detected platform environment and capabilities
human.events // container for events dispatched by human human.events; // container for events dispatched by human
Human.defaults // static property of Human class that contains default configuration Human.defaults; // static property of Human class that contains default configuration
``` ```
## TensorFlow ## TensorFlow
@ -134,8 +141,9 @@ Human.defaults // static property of Human class that contains default conf
`Human` internally uses `TensorFlow/JS` for all ML processing `Human` internally uses `TensorFlow/JS` for all ML processing
Access to namespace of an internal instance of `tfjs` used by `human` is possible via: Access to namespace of an internal instance of `tfjs` used by `human` is possible via:
<!-- eslint-skip -->
```js ```js
human.tf // instance of tfjs used by human, can be embedded or externally loaded human.tf; // instance of tfjs used by human, can be embedded or externally loaded
``` ```
<br> <br>