mirror of https://github.com/vladmandic/human
major update for 1.8 release candidate
parent 05df95fe4f
commit 66054542c2
@@ -1,6 +1,6 @@
# @vladmandic/human

Version: **1.7.1**
Version: **1.8.0**

Description: **Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition**

Author: **Vladimir Mandic <mandic00@live.com>**

@@ -9,11 +9,12 @@ Repository: **<git+https://github.com/vladmandic/human.git>**

## Changelog

### **HEAD -> main** 2021/04/25 mandic00@live.com

### **1.7.1** 2021/04/25 mandic00@live.com

### **origin/main** 2021/04/24 mandic00@live.com

- remove obsolete binary models
- enable cross origin isolation
- rewrite posenet decoder
- remove efficientpose
19 TODO.md
@@ -17,4 +17,21 @@ N/A

- Blazepose

Needs detector before running pose to center the image

## Soon to be Removed
## RC: 1.8

### Done

Major configuration simplification (see the sketch after this section):

- Unified minConfidence and scoreThreshold as minConfidence
- Replaced nmsRadius with built-in default
- Replaced maxFaces, maxDetections, maxHands, maxResults with maxDetected
- Removed deallocate, profile, scoped

Stop building sourcemaps for NodeJS deliverables

### TBD

- Remove modelPaths
- Remove blazeface-front, replace blazeface-back with blazeface
- NodeJS Exception handling
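For reference, a minimal sketch of a user configuration written against the unified 1.8 option names; the old names noted in comments are taken from the list above and the values are illustrative only, not an exhaustive or authoritative mapping:

```js
// Illustrative partial user config using the unified 1.8 names
const userConfig = {
  face: {
    detector: {
      maxDetected: 10,    // previously maxFaces
      minConfidence: 0.2, // now also covers what scoreThreshold used to control
    },
  },
  body: {
    maxDetected: 1,       // previously maxDetections; nmsRadius is now a built-in default
    minConfidence: 0.2,   // previously scoreThreshold
  },
  hand: {
    maxDetected: 1,       // previously maxHands
    minConfidence: 0.1,   // now also covers what scoreThreshold used to control
  },
  object: {
    maxDetected: 10,      // previously maxResults
  },
};
```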
@@ -12,7 +12,7 @@ const userConfig = {
videoOptimized: true,
filter: { enabled: false },
face: { enabled: true,
detector: { rotation: false, maxFaces: 1 },
detector: { rotation: false, maxDetected: 1 },
mesh: { enabled: true },
iris: { enabled: true },
description: { enabled: false },
@@ -492,15 +492,13 @@ function setupMenu() {
menu.process = new Menu(document.body, '', { top, left: x[2] });
menu.process.addList('backend', ['cpu', 'webgl', 'wasm', 'humangl'], human.config.backend, (val) => human.config.backend = val);
menu.process.addBool('async operations', human.config, 'async', (val) => human.config.async = val);
// menu.process.addBool('enable profiler', human.config, 'profile', (val) => human.config.profile = val);
// menu.process.addBool('memory shield', human.config, 'deallocate', (val) => human.config.deallocate = val);
menu.process.addBool('use web worker', ui, 'useWorker');
menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
menu.process.addLabel('model parameters');
menu.process.addRange('max objects', human.config.face.detector, 'maxFaces', 1, 50, 1, (val) => {
human.config.face.detector.maxFaces = parseInt(val);
human.config.body.maxDetections = parseInt(val);
human.config.hand.maxHands = parseInt(val);
menu.process.addRange('max objects', human.config.face.detector, 'maxDetected', 1, 50, 1, (val) => {
human.config.face.detector.maxDetected = parseInt(val);
human.config.body.maxDetected = parseInt(val);
human.config.hand.maxDetected = parseInt(val);
});
menu.process.addRange('skip frames', human.config.face.detector, 'skipFrames', 0, 50, 1, (val) => {
human.config.face.detector.skipFrames = parseInt(val);

@@ -512,11 +510,6 @@ function setupMenu() {
human.config.face.emotion.minConfidence = parseFloat(val);
human.config.hand.minConfidence = parseFloat(val);
});
menu.process.addRange('score threshold', human.config.face.detector, 'scoreThreshold', 0.1, 1.0, 0.05, (val) => {
human.config.face.detector.scoreThreshold = parseFloat(val);
human.config.hand.scoreThreshold = parseFloat(val);
human.config.body.scoreThreshold = parseFloat(val);
});
menu.process.addRange('overlap', human.config.face.detector, 'iouThreshold', 0.1, 1.0, 0.05, (val) => {
human.config.face.detector.iouThreshold = parseFloat(val);
human.config.hand.iouThreshold = parseFloat(val);
File diff suppressed because one or more lines are too long (12 files)
@@ -1,6 +1,6 @@
{
"name": "@vladmandic/human",
"version": "1.7.1",
"version": "1.8.0",
"description": "Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition",
"sideEffects": false,
"main": "dist/human.node.js",
@@ -49,7 +49,6 @@ const config = {
minifyWhitespace: false,
minifyIdentifiers: false,
minifySyntax: false,
sourcemap: true,
bundle: true,
metafile: true,
target: 'es2018',

@@ -58,7 +57,6 @@ const config = {
minifyWhitespace: true,
minifyIdentifiers: true,
minifySyntax: true,
sourcemap: true,
bundle: true,
metafile: true,
target: 'es2018',

@@ -73,6 +71,7 @@ const targets = {
entryPoints: ['src/tfjs/tf-node.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['@tensorflow'],
sourcemap: false,
},
node: {
platform: 'node',

@@ -80,6 +79,7 @@ const targets = {
entryPoints: ['src/human.ts'],
outfile: 'dist/human.node.js',
external: ['@tensorflow'],
sourcemap: false,
},
},
nodeGPU: {

@@ -89,6 +89,7 @@ const targets = {
entryPoints: ['src/tfjs/tf-node-gpu.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['@tensorflow'],
sourcemap: false,
},
node: {
platform: 'node',

@@ -96,6 +97,7 @@ const targets = {
entryPoints: ['src/human.ts'],
outfile: 'dist/human.node-gpu.js',
external: ['@tensorflow'],
sourcemap: false,
},
},
nodeWASM: {

@@ -105,6 +107,7 @@ const targets = {
entryPoints: ['src/tfjs/tf-node-wasm.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['@tensorflow'],
sourcemap: false,
},
node: {
platform: 'node',

@@ -112,6 +115,7 @@ const targets = {
entryPoints: ['src/human.ts'],
outfile: 'dist/human.node-wasm.js',
external: ['@tensorflow'],
sourcemap: false,
},
},
@@ -122,6 +126,7 @@ const targets = {
entryPoints: ['src/tfjs/tf-browser.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['fs', 'buffer', 'util', 'os', '@tensorflow'],
sourcemap: true,
},
esm: {
platform: 'browser',

@@ -129,6 +134,7 @@ const targets = {
entryPoints: ['src/human.ts'],
outfile: 'dist/human.esm-nobundle.js',
external: ['fs', 'buffer', 'util', 'os', '@tensorflow'],
sourcemap: true,
},
},
browserBundle: {

@@ -138,6 +144,7 @@ const targets = {
entryPoints: ['src/tfjs/tf-browser.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['fs', 'buffer', 'util', 'os'],
sourcemap: true,
},
iife: {
platform: 'browser',

@@ -146,6 +153,7 @@ const targets = {
entryPoints: ['src/human.ts'],
outfile: 'dist/human.js',
external: ['fs', 'buffer', 'util', 'os'],
sourcemap: true,
},
esm: {
platform: 'browser',

@@ -153,6 +161,7 @@ const targets = {
entryPoints: ['src/human.ts'],
outfile: 'dist/human.esm.js',
external: ['fs', 'buffer', 'util', 'os'],
sourcemap: true,
},
/*
demo: {
@@ -236,3 +236,37 @@
2021-04-25 07:50:17 INFO: Generate types: ["src/human.ts"]
2021-04-25 07:50:22 INFO: Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-04-25 07:50:22 INFO: Generate TypeDocs: ["src/human.ts"]
2021-04-25 13:08:21 INFO: @vladmandic/human version 1.7.1
2021-04-25 13:08:21 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-04-25 13:08:21 INFO: Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true,"sourcemap":true,"bundle":true,"metafile":true,"target":"es2018"}
2021-04-25 13:08:21 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":733,"outputFiles":"dist/tfjs.esm.js"}
2021-04-25 13:08:21 STATE: Build for: node type: node: {"imports":35,"importBytes":514895,"outputBytes":294872,"outputFiles":"dist/human.node.js"}
2021-04-25 13:08:21 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":737,"outputFiles":"dist/tfjs.esm.js"}
2021-04-25 13:08:21 STATE: Build for: nodeGPU type: node: {"imports":35,"importBytes":514899,"outputBytes":294880,"outputFiles":"dist/human.node-gpu.js"}
2021-04-25 13:08:21 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":783,"outputFiles":"dist/tfjs.esm.js"}
2021-04-25 13:08:21 STATE: Build for: nodeWASM type: node: {"imports":35,"importBytes":514945,"outputBytes":294924,"outputFiles":"dist/human.node-wasm.js"}
2021-04-25 13:08:21 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-04-25 13:08:21 STATE: Build for: browserNoBundle type: esm: {"imports":35,"importBytes":515556,"outputBytes":295030,"outputFiles":"dist/human.esm-nobundle.js"}
2021-04-25 13:08:22 STATE: Build for: browserBundle type: tfjs: {"modules":1267,"moduleBytes":4085087,"imports":7,"importBytes":2488,"outputBytes":1101728,"outputFiles":"dist/tfjs.esm.js"}
2021-04-25 13:08:23 STATE: Build for: browserBundle type: iife: {"imports":35,"importBytes":1615890,"outputBytes":1393163,"outputFiles":"dist/human.js"}
2021-04-25 13:08:23 STATE: Build for: browserBundle type: esm: {"imports":35,"importBytes":1615890,"outputBytes":1393121,"outputFiles":"dist/human.esm.js"}
2021-04-25 13:08:23 INFO: Generate types: ["src/human.ts"]
2021-04-25 13:08:28 INFO: Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-04-25 13:08:28 INFO: Generate TypeDocs: ["src/human.ts"]
2021-04-25 13:13:05 INFO: @vladmandic/human version 1.8.0
2021-04-25 13:13:05 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-04-25 13:13:05 INFO: Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true,"bundle":true,"metafile":true,"target":"es2018"}
2021-04-25 13:13:05 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":696,"outputFiles":"dist/tfjs.esm.js"}
2021-04-25 13:13:05 STATE: Build for: node type: node: {"imports":35,"importBytes":514858,"outputBytes":294833,"outputFiles":"dist/human.node.js"}
2021-04-25 13:13:05 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":700,"outputFiles":"dist/tfjs.esm.js"}
2021-04-25 13:13:05 STATE: Build for: nodeGPU type: node: {"imports":35,"importBytes":514862,"outputBytes":294837,"outputFiles":"dist/human.node-gpu.js"}
2021-04-25 13:13:05 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":746,"outputFiles":"dist/tfjs.esm.js"}
2021-04-25 13:13:06 STATE: Build for: nodeWASM type: node: {"imports":35,"importBytes":514908,"outputBytes":294880,"outputFiles":"dist/human.node-wasm.js"}
2021-04-25 13:13:06 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-04-25 13:13:06 STATE: Build for: browserNoBundle type: esm: {"imports":35,"importBytes":515556,"outputBytes":295030,"outputFiles":"dist/human.esm-nobundle.js"}
2021-04-25 13:13:06 STATE: Build for: browserBundle type: tfjs: {"modules":1267,"moduleBytes":4085087,"imports":7,"importBytes":2488,"outputBytes":1101728,"outputFiles":"dist/tfjs.esm.js"}
2021-04-25 13:13:07 STATE: Build for: browserBundle type: iife: {"imports":35,"importBytes":1615890,"outputBytes":1393163,"outputFiles":"dist/human.js"}
2021-04-25 13:13:08 STATE: Build for: browserBundle type: esm: {"imports":35,"importBytes":1615890,"outputBytes":1393121,"outputFiles":"dist/human.esm.js"}
2021-04-25 13:13:08 INFO: Generate types: ["src/human.ts"]
2021-04-25 13:13:12 INFO: Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-04-25 13:13:12 INFO: Generate TypeDocs: ["src/human.ts"]
@@ -1,6 +1,5 @@
import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';

let model;
let last = { age: 0 };

@@ -31,14 +30,7 @@ export async function predict(image, config) {
let ageT;
const obj = { age: 0 };

if (!config.profile) {
if (config.face.age.enabled) ageT = await model.predict(enhance);
} else {
const profileAge = config.face.age.enabled ? await tf.profile(() => model.predict(enhance)) : {};
ageT = profileAge.result.clone();
profileAge.result.dispose();
profile.run('age', profileAge);
}
enhance.dispose();

if (ageT) {
@@ -90,7 +90,7 @@ export class BlazeFaceModel {
const scoresOut = tf.sigmoid(logits).squeeze();
return [batchOut, boxesOut, scoresOut];
});
const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.face.detector.maxFaces, this.config.face.detector.iouThreshold, this.config.face.detector.scoreThreshold);
const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.face.detector.maxDetected, this.config.face.detector.iouThreshold, this.config.face.detector.minConfidence);
const boxIndices = boxIndicesTensor.arraySync();
boxIndicesTensor.dispose();
const boundingBoxesMap = boxIndices.map((boxIndex) => tf.slice(boxes, [boxIndex, 0], [1, -1]));
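For context, the renamed detector options are passed straight through to TensorFlow.js non-maximum suppression, whose positional parameters are maxOutputSize, iouThreshold and scoreThreshold. A hedged sketch of that mapping follows; the helper name and the placeholder tensors are illustrative, not part of the library:

```js
// Sketch: mapping the 1.8 config names onto tf.image.nonMaxSuppressionAsync
// (boxes, scores, maxOutputSize, iouThreshold, scoreThreshold)
import * as tf from '@tensorflow/tfjs';

async function filterFaceBoxes(boxes, scores, config) {
  // boxes: [numBoxes, 4] tensor of box coordinates, scores: [numBoxes] tensor
  return tf.image.nonMaxSuppressionAsync(
    boxes,
    scores,
    config.face.detector.maxDetected,   // was maxFaces
    config.face.detector.iouThreshold,  // unchanged
    config.face.detector.minConfidence, // was scoreThreshold
  );
}
```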
@@ -161,7 +161,7 @@ export class Pipeline {
if (config.videoOptimized) this.skipped++;

// if detector result count doesn't match current working set, use it to reset current working set
if (!config.videoOptimized || (detector && detector.boxes && (!config.face.mesh.enabled || (detector.boxes.length !== this.detectedFaces) && (this.detectedFaces !== config.face.detector.maxFaces)))) {
if (!config.videoOptimized || (detector && detector.boxes && (!config.face.mesh.enabled || (detector.boxes.length !== this.detectedFaces) && (this.detectedFaces !== config.face.detector.maxDetected)))) {
this.storedBoxes = [];
this.detectedFaces = 0;
for (const possible of detector.boxes) {
@@ -2,7 +2,6 @@

import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';
import * as annotations from './annotations';

let model;

@@ -25,17 +24,9 @@ export async function predict(image, config) {
const resize = tf.image.resizeBilinear(image, [model.width, model.height], false);
const normalize = tf.div(resize, [255.0]);
resize.dispose();
let points;
if (!config.profile) { // run through profiler or just execute
const resT = await model.predict(normalize);
points = resT.find((t) => (t.size === 195 || t.size === 155)).dataSync(); // order of output tensors may change between models, full has 195 and upper has 155 items
const points = resT.find((t) => (t.size === 195 || t.size === 155)).dataSync(); // order of output tensors may change between models, full has 195 and upper has 155 items
resT.forEach((t) => t.dispose());
} else {
const profileData = await tf.profile(() => model.predict(normalize));
points = profileData.result.find((t) => (t.size === 195 || t.size === 155)).dataSync();
profileData.result.forEach((t) => t.dispose());
profile.run('blazepose', profileData);
}
normalize.dispose();
const keypoints: Array<{ id, part, position: { x, y, z }, score, presence }> = [];
const labels = points.length === 195 ? annotations.full : annotations.upper; // full model has 39 keypoints, upper has 31 keypoints
148 src/config.ts
@@ -9,33 +9,34 @@
export interface Config {
/** Backend used for TFJS operations */
backend: null | '' | 'cpu' | 'wasm' | 'webgl' | 'humangl' | 'tensorflow',

/** Path to *.wasm files if backend is set to `wasm` */
wasmPath: string,

/** Print debug statements to console */
debug: boolean,

/** Perform model loading and inference concurrently or sequentially */
async: boolean,
/** Collect and print profiling data during inference operations */
profile: boolean,
/** Internal: Use aggressive GPU memory deallocator when backend is set to `webgl` or `humangl` */
deallocate: boolean,
/** Internal: Run all inference operations in an explicit local scope run to avoid memory leaks */
scoped: boolean,

/** Perform additional optimizations when input is video,
* - must be disabled for images
* - automatically disabled for Image, ImageData, ImageBitmap and Tensor inputs
* - skips boundary detection for every `skipFrames` frames specified for each model
* - while maintaining in-box detection since objects don't change definition as fast */
videoOptimized: boolean,

/** What to use for `human.warmup()`
* - warmup pre-initializes all models for faster inference but can take significant time on startup
* - only used for `webgl` and `humangl` backends
*/
warmup: 'none' | 'face' | 'full' | 'body',

/** Base model path (typically starting with file://, http:// or https://) for all models
* - individual modelPath values are joined to this path
* - individual modelPath values are relative to this path
*/
modelBasePath: string,

/** Run input through image filters before inference
* - image filters run with near-zero latency as they are executed on the GPU
*/
@@ -90,31 +91,30 @@ export interface Config {
gesture: {
enabled: boolean,
},

/** Controls and configures all face-specific options:
* - face detection, face mesh detection, age, gender, emotion detection and face description
* Parameters:
* - enabled: true/false
* - modelPath: path for individual face model
* - modelPath: path for each of face models
* - minConfidence: threshold for discarding a prediction
* - iouThreshold: amount of overlap between two detected objects before one object is removed
* - maxDetected: maximum number of faces detected in the input, should be set to the minimum number for performance
* - rotation: use calculated rotated face image or just box with rotation as-is, false means higher performance, but incorrect mesh mapping on higher face angles
* - maxFaces: maximum number of faces detected in the input, should be set to the minimum number for performance
* - skipFrames: how many frames to go without re-running the face detector and just run modified face mesh analysis, only valid if videoOptimized is set to true
* - skipInitial: if previous detection resulted in no faces detected, should skipFrames be reset immediately to force new detection cycle
* - minConfidence: threshold for discarding a prediction
* - iouThreshold: threshold for deciding whether boxes overlap too much in non-maximum suppression
* - scoreThreshold: threshold for deciding when to remove boxes based on score in non-maximum suppression
* - return extracted face as tensor for further user processing
* - return: return extracted face as tensor for further user processing
*/
face: {
enabled: boolean,
detector: {
modelPath: string,
rotation: boolean,
maxFaces: number,
maxDetected: number,
skipFrames: number,
skipInitial: boolean,
minConfidence: number,
iouThreshold: number,
scoreThreshold: number,
return: boolean,
},
mesh: {
@@ -138,31 +138,30 @@ export interface Config {
modelPath: string,
},
},

/** Controls and configures all body detection specific options
* - enabled: true/false
* - modelPath: paths for both hand detector model and hand skeleton model
* - maxDetections: maximum number of people detected in the input, should be set to the minimum number for performance
* - scoreThreshold: threshold for deciding when to remove people based on score in non-maximum suppression
* - nmsRadius: threshold for deciding whether body parts overlap too much in non-maximum suppression
* - modelPath: body pose model, can be absolute path or relative to modelBasePath
* - minConfidence: threshold for discarding a prediction
* - maxDetected: maximum number of people detected in the input, should be set to the minimum number for performance
*/
body: {
enabled: boolean,
modelPath: string,
maxDetections: number,
scoreThreshold: number,
nmsRadius: number,
maxDetected: number,
minConfidence: number,
},

/** Controls and configures all hand detection specific options
* - enabled: true/false
* - modelPath: paths for both hand detector model and hand skeleton model
* - landmarks: detect hand landmarks or just hand boundary box
* - modelPath: paths for hand detector and hand skeleton models, can be absolute path or relative to modelBasePath
* - minConfidence: threshold for discarding a prediction
* - iouThreshold: amount of overlap between two detected objects before one object is removed
* - maxDetected: maximum number of hands detected in the input, should be set to the minimum number for performance
* - rotation: use best-guess rotated hand image or just box with rotation as-is, false means higher performance, but incorrect finger mapping if hand is inverted
* - skipFrames: how many frames to go without re-running the hand bounding box detector and just run modified hand skeleton detector, only valid if videoOptimized is set to true
* - skipInitial: if previous detection resulted in no hands detected, should skipFrames be reset immediately to force new detection cycle
* - minConfidence: threshold for discarding a prediction
* - iouThreshold: threshold for deciding whether boxes overlap too much in non-maximum suppression
* - scoreThreshold: threshold for deciding when to remove boxes based on score in non-maximum suppression
* - maxHands: maximum number of hands detected in the input, should be set to the minimum number for performance
* - landmarks: detect hand landmarks or just hand boundary box
*/
hand: {
enabled: boolean,
@@ -171,8 +170,7 @@ export interface Config {
skipInitial: boolean,
minConfidence: number,
iouThreshold: number,
scoreThreshold: number,
maxHands: number,
maxDetected: number,
landmarks: boolean,
detector: {
modelPath: string,

@@ -181,10 +179,13 @@ export interface Config {
modelPath: string,
},
},

/** Controls and configures all object detection specific options
* - enabled: true/false
* - modelPath: object detection model, can be absolute path or relative to modelBasePath
* - minConfidence: minimum score that detection must have to return as valid object
* - iouThreshold: amount of overlap between two detected objects before one object is removed
* - maxResults: maximum number of detections to return
* - maxDetected: maximum number of detections to return
* - skipFrames: run object detection every n input frames, only valid if videoOptimized is set to true
*/
object: {
@@ -192,40 +193,20 @@ export interface Config {
modelPath: string,
minConfidence: number,
iouThreshold: number,
maxResults: number,
maxDetected: number,
skipFrames: number,
},
}

const config: Config = {
backend: 'webgl', // select tfjs backend to use
backend: 'webgl', // select tfjs backend to use, leave empty to use default backend
// can be 'webgl', 'wasm', 'cpu', or 'humangl' which is a custom version of webgl
// leave as empty string to continue using default backend
// when backend is set outside of Human library
modelBasePath: '../models/', // base path for all models
wasmPath: '../assets/', // path for wasm binaries
// only used for backend: wasm
wasmPath: '../assets/', // path for wasm binaries, only used for backend: wasm
debug: true, // print additional status messages to console
async: true, // execute enabled models in parallel
// this disables per-model performance data but
// slightly increases performance
// cannot be used if profiling is enabled
profile: false, // internal: enable tfjs profiling
// this has significant performance impact
// only enable for debugging purposes
// currently only implemented for age,gender,emotion models
deallocate: false, // internal: aggressively deallocate gpu memory after each usage
// only valid for webgl and humangl backend and only during first call
// cannot be changed unless library is reloaded
// this has significant performance impact
// only enable on low-memory devices
scoped: false, // internal: enable scoped runs
// some models *may* have memory leaks,
// this wraps everything in a local scope at a cost of performance
// typically not needed
videoOptimized: true, // perform additional optimizations when input is video,
// must be disabled for images
// automatically disabled for Image, ImageData, ImageBitmap and Tensor inputs
// automatically disabled for Image, ImageData, ImageBitmap
// skips boundary detection for every n frames
// while maintaining in-box detection since objects cannot move that fast
warmup: 'face', // what to use for human.warmup(), can be 'none', 'face', 'full'
@@ -258,7 +239,7 @@ const config: Config = {
},

gesture: {
enabled: true, // enable simple gesture recognition
enabled: true, // enable gesture recognition based on model results
},

face: {

@@ -267,12 +248,11 @@ const config: Config = {
// detector, mesh, iris, age, gender, emotion
// (note: module is not loaded until it is required)
detector: {
modelPath: 'blazeface-back.json', // detector model
// can be either absolute path or relative to modelBasePath
modelPath: 'blazeface-back.json', // detector model, can be absolute path or relative to modelBasePath
rotation: false, // use best-guess rotated face image or just box with rotation as-is
// false means higher performance, but incorrect mesh mapping if face angle is above 20 degrees
// this parameter is not valid in nodejs
maxFaces: 10, // maximum number of faces detected in the input
maxDetected: 10, // maximum number of faces detected in the input
// should be set to the minimum number for performance
skipFrames: 21, // how many frames to go without re-running the face bounding box detector
// only used for video inputs
@@ -282,18 +262,13 @@ const config: Config = {
skipInitial: false, // if previous detection resulted in no faces detected,
// should skipFrames be reset immediately to force new detection cycle
minConfidence: 0.2, // threshold for discarding a prediction
iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much in
// non-maximum suppression (0.1 means drop if overlap 10%)
scoreThreshold: 0.2, // threshold for deciding when to remove boxes based on score
// in non-maximum suppression,
// this is applied on detection objects only and before minConfidence
iouThreshold: 0.1, // amount of overlap between two detected objects before one object is removed
return: false, // return extracted face as tensor
},

mesh: {
enabled: true,
modelPath: 'facemesh.json', // facemesh model
// can be either absolute path or relative to modelBasePath
modelPath: 'facemesh.json', // facemesh model, can be absolute path or relative to modelBasePath
},

iris: {
@@ -316,25 +291,18 @@ const config: Config = {
enabled: true,
minConfidence: 0.1, // threshold for discarding a prediction
skipFrames: 32, // how many frames to go without re-running the detector
modelPath: 'emotion.json', // face emotion model
// can be either absolute path or relative to modelBasePath
modelPath: 'emotion.json', // face emotion model, can be absolute path or relative to modelBasePath
},
},

body: {
enabled: true,
modelPath: 'posenet.json', // body model
// can be either absolute path or relative to modelBasePath
// can be 'posenet', 'blazepose' or 'efficientpose'
// 'blazepose' and 'efficientpose' are experimental
maxDetections: 1, // maximum number of people detected in the input
modelPath: 'posenet.json', // body model, can be absolute path or relative to modelBasePath
// can be 'posenet' or 'blazepose'
maxDetected: 1, // maximum number of people detected in the input
// should be set to the minimum number for performance
// only valid for posenet as blazepose only detects single pose
scoreThreshold: 0.2, // threshold for deciding when to remove boxes based on score
// in non-maximum suppression
// only valid for posenet as blazepose only detects single pose
nmsRadius: 20, // radius for deciding points are too close in non-maximum suppression
// only valid for posenet as blazepose only detects single pose
minConfidence: 0.2, // threshold for discarding a prediction
},

hand: {
@@ -349,32 +317,24 @@ const config: Config = {
skipInitial: false, // if previous detection resulted in no hands detected,
// should skipFrames be reset immediately to force new detection cycle
minConfidence: 0.1, // threshold for discarding a prediction
iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much
// in non-maximum suppression
scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on
// score in non-maximum suppression
maxHands: 1, // maximum number of hands detected in the input
iouThreshold: 0.1, // amount of overlap between two detected objects before one object is removed
maxDetected: 1, // maximum number of hands detected in the input
// should be set to the minimum number for performance
landmarks: true, // detect hand landmarks or just hand boundary box
detector: {
modelPath: 'handdetect.json', // hand detector model
// can be either absolute path or relative to modelBasePath
modelPath: 'handdetect.json', // hand detector model, can be absolute path or relative to modelBasePath
},
skeleton: {
modelPath: 'handskeleton.json', // hand skeleton model
// can be either absolute path or relative to modelBasePath
modelPath: 'handskeleton.json', // hand skeleton model, can be absolute path or relative to modelBasePath
},
},

object: {
enabled: false,
modelPath: 'nanodet.json', // object detection model
// can be either absolute path or relative to modelBasePath
// 'nanodet' is experimental
minConfidence: 0.20, // threshold for discarding a prediction
iouThreshold: 0.40, // threshold for deciding whether boxes overlap too much
// in non-maximum suppression
maxResults: 10, // maximum number of objects detected in the input
modelPath: 'nanodet.json', // experimental: object detection model, can be absolute path or relative to modelBasePath
minConfidence: 0.2, // threshold for discarding a prediction
iouThreshold: 0.4, // amount of overlap between two detected objects before one object is removed
maxDetected: 10, // maximum number of objects detected in the input
skipFrames: 41, // how many frames to go without re-running the detector
},
};
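To illustrate how these defaults are consumed, a minimal usage sketch with a partial override; the import path and the assumption that user values are merged over the defaults above reflect typical usage of the library, not something taken from this diff:

```js
// Hypothetical usage: supply only the values to override, rely on defaults for the rest
import Human from '@vladmandic/human';

const human = new Human({
  modelBasePath: '../models/',
  face: { detector: { maxDetected: 5, minConfidence: 0.3 } },
  body: { maxDetected: 1 },
  object: { enabled: false },
});
```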
@@ -294,68 +294,68 @@ export async function body(inCanvas: HTMLCanvasElement, result: Array<any>, draw
// shoulder line
points.length = 0;
part = result[i].keypoints.find((a) => a.part === 'leftShoulder');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'rightShoulder');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
curves(ctx, points, localOptions);
// torso main
points.length = 0;
part = result[i].keypoints.find((a) => a.part === 'rightShoulder');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'rightHip');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'leftHip');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'leftShoulder');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
if (points.length === 4) lines(ctx, points, localOptions); // only draw if we have complete torso
// leg left
points.length = 0;
part = result[i].keypoints.find((a) => a.part === 'leftHip');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'leftKnee');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'leftAnkle');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'leftHeel');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'leftFoot');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
curves(ctx, points, localOptions);
// leg right
points.length = 0;
part = result[i].keypoints.find((a) => a.part === 'rightHip');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'rightKnee');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'rightAnkle');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'rightHeel');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'rightFoot');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
curves(ctx, points, localOptions);
// arm left
points.length = 0;
part = result[i].keypoints.find((a) => a.part === 'leftShoulder');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'leftElbow');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'leftWrist');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'leftPalm');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
curves(ctx, points, localOptions);
// arm right
points.length = 0;
part = result[i].keypoints.find((a) => a.part === 'rightShoulder');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'rightElbow');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'rightWrist');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
part = result[i].keypoints.find((a) => a.part === 'rightPalm');
if (part && part.score > defaults.body.scoreThreshold) points.push([part.position.x, part.position.y]);
if (part && part.score > defaults.body.minConfidence) points.push([part.position.x, part.position.y]);
curves(ctx, points, localOptions);
// draw all
}
@@ -1,6 +1,5 @@
import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';

let model;
let keypoints: Array<any> = [];

@@ -55,15 +54,7 @@ export async function predict(image, config) {
});

let resT;

if (!config.profile) {
if (config.body.enabled) resT = await model.predict(tensor);
} else {
const profileT = config.body.enabled ? await tf.profile(() => model.predict(tensor)) : {};
resT = profileT.result.clone();
profileT.result.dispose();
profile.run('body', profileT);
}
tensor.dispose();

if (resT) {

@@ -76,8 +67,8 @@ export async function predict(image, config) {
// process each unstacked tensor as a separate body part
for (let id = 0; id < stack.length; id++) {
// actual processing to get coordinates and score
const [x, y, score] = max2d(stack[id], config.body.scoreThreshold);
if (score > config.body.scoreThreshold) {
const [x, y, score] = max2d(stack[id], config.body.minConfidence);
if (score > config.body.minConfidence) {
parts.push({
id,
score: Math.round(100 * score) / 100,
@@ -1,6 +1,5 @@
import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';

type Tensor = typeof tf.Tensor;
type DB = Array<{ name: string, source: string, embedding: number[] }>;

@@ -87,7 +86,6 @@ export async function predict(input, config): Promise<number[]> {
let data: Array<number> = [];
if (config.face.embedding.enabled) {
const image = enhance(input);
if (!config.profile) {
data = tf.tidy(() => {
/*
// if needed convert from NHWC to NCHW

@@ -121,12 +119,6 @@ export async function predict(input, config): Promise<number[]> {
const output: Array<number> = reduce.dataSync();
return [...output]; // convert typed array to simple array
});
} else {
const profileData = await tf.profile(() => model.predict({ img_inputs: image }));
data = [...profileData.result.dataSync()];
profileData.result.dispose();
profile.run('emotion', profileData);
}
tf.dispose(image);
}
resolve(data);
@@ -1,6 +1,5 @@
import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';

const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral'];
let model;

@@ -46,17 +45,9 @@ export async function predict(image, config) {
grayscale.dispose();
const obj: Array<{ score: number, emotion: string }> = [];
if (config.face.emotion.enabled) {
let data;
if (!config.profile) {
const emotionT = await model.predict(normalize); // result is already in range 0..1, no need for additional activation
data = emotionT.dataSync();
const data = emotionT.dataSync();
tf.dispose(emotionT);
} else {
const profileData = await tf.profile(() => model.predict(normalize));
data = profileData.result.dataSync();
profileData.result.dispose();
profile.run('emotion', profileData);
}
for (let i = 0; i < data.length; i++) {
if (data[i] > config.face.emotion.minConfidence) obj.push({ score: Math.min(0.99, Math.trunc(100 * data[i]) / 100), emotion: annotations[i] });
}
@@ -1,6 +1,5 @@
import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';

let model;
let last = { age: 0 };

@@ -108,13 +107,7 @@ export async function predict(image, config) {
genderConfidence: <number>0,
descriptor: <number[]>[] };

if (!config.profile) {
if (config.face.description.enabled) resT = await model.predict(enhanced);
} else {
const profileDesc = config.face.description.enabled ? await tf.profile(() => model.predict(enhanced)) : {};
resT = profileDesc.result;
profile.run('faceres', profileDesc);
}
tf.dispose(enhanced);

if (resT) {
@@ -1,6 +1,5 @@
import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';

let model;
let last = { gender: '' };

@@ -49,14 +48,7 @@ export async function predict(image, config) {
let genderT;
const obj = { gender: '', confidence: 0 };

if (!config.profile) {
if (config.face.gender.enabled) genderT = await model.predict(enhance);
} else {
const profileGender = config.face.gender.enabled ? await tf.profile(() => model.predict(enhance)) : {};
genderT = profileGender.result.clone();
profileGender.result.dispose();
profile.run('gender', profileGender);
}
enhance.dispose();

if (genderT) {
@@ -46,7 +46,7 @@ export class HandDetector {
const rawBoxes = tf.slice(predictions, [0, 1], [-1, 4]);
const boxes = this.normalizeBoxes(rawBoxes);
rawBoxes.dispose();
const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.hand.maxHands, config.hand.iouThreshold, config.hand.scoreThreshold);
const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.hand.maxDetected, config.hand.iouThreshold, config.hand.minConfidence);
const filtered = filteredT.arraySync();

scoresT.dispose();
@@ -83,7 +83,7 @@ export class HandPipeline {
if (config.videoOptimized) this.skipped++;

// if detector result count doesn't match current working set, use it to reset current working set
if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.hand.maxHands) || !config.hand.landmarks)) {
if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.hand.maxDetected) || !config.hand.landmarks)) {
this.detectedHands = 0;
this.storedBoxes = [...boxes];
// for (const possible of boxes) this.storedBoxes.push(possible);
21 src/human.ts
@@ -13,7 +13,6 @@ import * as nanodet from './nanodet/nanodet';
import * as gesture from './gesture/gesture';
import * as image from './image/image';
import * as draw from './draw/draw';
import * as profile from './profile';
import { Config, defaults } from './config';
import { Result } from './result';
import * as sample from './sample';

@@ -168,14 +167,6 @@ export class Human {
this.sysinfo = sysinfo.info();
}

/** Internal: ProfileData method returns last known profiling information
* - Requires human.config.profile set to true
*/
profileData(): { newBytes, newTensors, peakBytes, numKernelOps, timeKernelOps, slowestKernelOps, largestKernelOps } | {} {
if (this.config.profile) return profile.data;
return {};
}

// helper function: measure tensor leak
/** @hidden */
analyze = (...msg) => {

@@ -335,9 +326,9 @@ export class Human {
if (this.tf.getBackend() === 'webgl' || this.tf.getBackend() === 'humangl') {
this.tf.ENV.set('CHECK_COMPUTATION_FOR_ERRORS', false);
this.tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
if (this.config.deallocate) {
log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);
this.tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 0 : -1);
if (typeof this.config['deallocate'] !== 'undefined') {
log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', true);
this.tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', 0);
}
const gl = await this.tf.backend().getGPGPUContext().gl;
if (this.config.debug) log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);

@@ -378,9 +369,6 @@ export class Human {
// load models if enabled
await this.load();

if (this.config.scoped) this.tf.engine().startScope();
this.analyze('Start Scope:');

// disable video optimization for inputs of type image, but skip if inside worker thread
let previousVideoOptimized;
// @ts-ignore ignore missing type for WorkerGlobalScope as that is the point

@@ -474,9 +462,6 @@ export class Human {
}
tf.dispose(process.tensor);

if (this.config.scoped) this.tf.engine().endScope();
this.analyze('End Scope:');

// run gesture analysis last
let gestureRes: any[] = [];
if (this.config.gesture.enabled) {
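The webgl setup change above no longer scales the texture-deletion threshold off the config value; whenever any `deallocate` value is present it simply sets the threshold to zero. A standalone TensorFlow.js equivalent of that flag change, shown for illustration only and not as the library's public API:

```js
// Roughly what Human does above when `deallocate` is supplied on webgl/humangl backends:
// delete unused WebGL textures immediately instead of caching them.
import * as tf from '@tensorflow/tfjs';

tf.env().set('WEBGL_DELETE_TEXTURE_THRESHOLD', 0);
```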
@@ -1,6 +1,5 @@
import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';
import { labels } from './labels';

let model;

@@ -83,7 +82,7 @@ async function process(res, inputSize, outputShape, config) {
const nmsScores = results.map((a) => a.score);
let nmsIdx: any[] = [];
if (nmsBoxes && nmsBoxes.length > 0) {
const nms = await tf.image.nonMaxSuppressionAsync(nmsBoxes, nmsScores, config.object.maxResults, config.object.iouThreshold, config.object.minConfidence);
const nms = await tf.image.nonMaxSuppressionAsync(nmsBoxes, nmsScores, config.object.maxDetected, config.object.iouThreshold, config.object.minConfidence);
nmsIdx = nms.dataSync();
tf.dispose(nms);
}

@@ -114,13 +113,7 @@ export async function predict(image, config) {
resize.dispose();

let objectT;
if (!config.profile) {
if (config.object.enabled) objectT = await model.predict(transpose);
} else {
const profileObject = config.object.enabled ? await tf.profile(() => model.predict(transpose)) : {};
objectT = profileObject.result;
profile.run('object', profileObject);
}
transpose.dispose();

const obj = await process(objectT, model.inputSize, outputSize, config);
@@ -19,7 +19,7 @@ export async function predict(input, config) {
const buffers = await Promise.all(res.map((tensor) => tensor.buffer()));
for (const t of res) t.dispose();

const decoded = await poses.decode(buffers[0], buffers[1], buffers[2], buffers[3], config.body.nmsRadius, config.body.maxDetections, config.body.scoreThreshold);
const decoded = await poses.decode(buffers[0], buffers[1], buffers[2], buffers[3], config.body.maxDetected, config.body.minConfidence);
const scaled = util.scalePoses(decoded, [input.shape[1], input.shape[2]], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);

return scaled;
@ -3,6 +3,7 @@ import * as kpt from './keypoints';

const localMaximumRadius = 1;
const defaultOutputStride = 16;
const squaredNmsRadius = 20 ** 2;

function traverseToTargetKeypoint(edgeId, sourceKeypoint, targetKeypointId, scoresBuffer, offsets, outputStride, displacements, offsetRefineStep = 2) {
  const getDisplacement = (point) => ({

@ -86,7 +87,7 @@ function scoreIsMaximumInLocalWindow(keypointId, score, heatmapY, heatmapX, scor
  return localMaximum;
}

export function buildPartWithScoreQueue(scoreThreshold, scores) {
export function buildPartWithScoreQueue(minConfidence, scores) {
  const [height, width, numKeypoints] = scores.shape;
  const queue = new utils.MaxHeap(height * width * numKeypoints, ({ score }) => score);
  for (let heatmapY = 0; heatmapY < height; ++heatmapY) {

@ -94,7 +95,7 @@ export function buildPartWithScoreQueue(scoreThreshold, scores) {
    for (let keypointId = 0; keypointId < numKeypoints; ++keypointId) {
      const score = scores.get(heatmapY, heatmapX, keypointId);
      // Only consider parts with score greater or equal to threshold as root candidates.
      if (score < scoreThreshold) continue;
      if (score < minConfidence) continue;
      // Only consider keypoints whose score is maximum in a local window.
      if (scoreIsMaximumInLocalWindow(keypointId, score, heatmapY, heatmapX, scores)) queue.enqueue({ score, part: { heatmapY, heatmapX, id: keypointId } });
    }

@ -103,38 +104,37 @@ export function buildPartWithScoreQueue(scoreThreshold, scores) {
  return queue;
}

function withinRadius(poses, squaredNmsRadius, { x, y }, keypointId) {
function withinRadius(poses, { x, y }, keypointId) {
  return poses.some(({ keypoints }) => {
    const correspondingKeypoint = keypoints[keypointId].position;
    return utils.squaredDistance(y, x, correspondingKeypoint.y, correspondingKeypoint.x) <= squaredNmsRadius;
  });
}

function getInstanceScore(existingPoses, squaredNmsRadius, instanceKeypoints) {
function getInstanceScore(existingPoses, instanceKeypoints) {
  const notOverlappedKeypointScores = instanceKeypoints.reduce((result, { position, score }, keypointId) => {
    if (!withinRadius(existingPoses, squaredNmsRadius, position, keypointId)) result += score;
    if (!withinRadius(existingPoses, position, keypointId)) result += score;
    return result;
  }, 0.0);
  return notOverlappedKeypointScores / instanceKeypoints.length;
}

export function decode(offsetsBuffer, scoresBuffer, displacementsFwdBuffer, displacementsBwdBuffer, nmsRadius, maxDetections, scoreThreshold) {
export function decode(offsetsBuffer, scoresBuffer, displacementsFwdBuffer, displacementsBwdBuffer, maxDetected, minConfidence) {
  const poses: Array<{ keypoints: any, box: any, score: number }> = [];
  const queue = buildPartWithScoreQueue(scoreThreshold, scoresBuffer);
  const squaredNmsRadius = nmsRadius ** 2;
  // Generate at most maxDetections object instances per image in decreasing root part score order.
  while (poses.length < maxDetections && !queue.empty()) {
  const queue = buildPartWithScoreQueue(minConfidence, scoresBuffer);
  // Generate at most maxDetected object instances per image in decreasing root part score order.
  while (poses.length < maxDetected && !queue.empty()) {
    // The top element in the queue is the next root candidate.
    const root = queue.dequeue();
    // Part-based non-maximum suppression: We reject a root candidate if it is within a disk of `nmsRadius` pixels from the corresponding part of a previously detected instance.
    const rootImageCoords = utils.getImageCoords(root.part, defaultOutputStride, offsetsBuffer);
    if (withinRadius(poses, squaredNmsRadius, rootImageCoords, root.part.id)) continue;
    if (withinRadius(poses, rootImageCoords, root.part.id)) continue;
    // Else start a new detection instance at the position of the root.
    const allKeypoints = decodePose(root, scoresBuffer, offsetsBuffer, defaultOutputStride, displacementsFwdBuffer, displacementsBwdBuffer);
    const keypoints = allKeypoints.filter((a) => a.score > scoreThreshold);
    const score = getInstanceScore(poses, squaredNmsRadius, keypoints);
    const keypoints = allKeypoints.filter((a) => a.score > minConfidence);
    const score = getInstanceScore(poses, keypoints);
    const box = utils.getBoundingBox(keypoints);
    if (score > scoreThreshold) poses.push({ keypoints, box, score: Math.round(100 * score) / 100 });
    if (score > minConfidence) poses.push({ keypoints, box, score: Math.round(100 * score) / 100 });
  }
  return poses;
}
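The net effect of the decoder rework above: the caller no longer supplies nmsRadius or a separate scoreThreshold, only the unified maxDetected and minConfidence values, while the suppression radius is the built-in squaredNmsRadius constant. A minimal calling sketch, assuming the module above lives at ./decoders and that the model outputs have already been read into tensor buffers (both assumptions for illustration, not the library's actual calling code):

```ts
import * as decoders from './decoders'; // assumed path of the module shown above

// buffers: assumed to be tensor buffers produced from the posenet model outputs
export function decodePoses(buffers: { scores: any, offsets: any, dispFwd: any, dispBwd: any }, config: any) {
  // 1.7 call shape: decode(offsets, scores, fwd, bwd, nmsRadius, maxDetections, scoreThreshold)
  // 1.8 call shape: nmsRadius and scoreThreshold are gone, only the unified values remain
  return decoders.decode(
    buffers.offsets,
    buffers.scores,
    buffers.dispFwd,
    buffers.dispBwd,
    config.body.maxDetected,    // replaces maxDetections
    config.body.minConfidence,  // replaces scoreThreshold
  );
}
```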
@ -4,7 +4,7 @@ export const data = {};

export function run(modelName: string, profileData: any): void {
  if (!profileData || !profileData.kernels) return;
  const maxResults = 5;
  const maxDetected = 5;
  const time = profileData.kernels
    .filter((a) => a.kernelTimeMs > 0)
    .reduce((a, b) => a += b.kernelTimeMs, 0);

@ -16,8 +16,8 @@ export function run(modelName: string, profileData: any): void {
    .map((a, i) => { a.id = i; return a; })
    .filter((a) => a.totalBytesSnapshot > 0)
    .sort((a, b) => b.totalBytesSnapshot - a.totalBytesSnapshot);
  if (slowest.length > maxResults) slowest.length = maxResults;
  if (largest.length > maxResults) largest.length = maxResults;
  if (slowest.length > maxDetected) slowest.length = maxDetected;
  if (largest.length > maxDetected) largest.length = maxDetected;
  data[modelName] = {
    model: modelName,
    newBytes: profileData.newBytes,
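The run() helper above consumes the object returned by tf.profile(); a minimal sketch of that flow, reusing the import paths from the template file removed further down in this commit (model and input are placeholders):

```ts
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';

// Sketch only: wrap a prediction in tf.profile() and hand the profiling info to run(),
// which keeps the slowest/largest kernels (by kernelTimeMs and totalBytesSnapshot) in data[name]
async function profiledPredict(model: any, input: any) {
  const info: any = await tf.profile(() => model.predict(input));
  profile.run('mymodel', info);  // aggregated stats end up in profile.data['mymodel']
  const result = info.result;    // the tensor returned by model.predict()
  return result;
}
```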
@ -1,62 +0,0 @@

import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';

/*
Prototype implementation for model processing
Must implement
- load()
- predict()
Must account for:
- image processing, tfjs profiling
*/

let model;
let last = { };
let skipped = Number.MAX_SAFE_INTEGER;

export async function load(config) {
  if (!model) {
    model = await tf.loadGraphModel(join(config.modelBasePath, config.prototype.modelPath));
    if (!model || !model.modelUrl) log('load model failed:', config.prototype.modelPath);
    else if (config.debug) log('load model:', model.modelUrl);
  }
  return model;
}

export async function predict(image, config) {
  if (!model) return null;
  if ((skipped < config.prototype.skipFrames) && config.videoOptimized && Object.keys(last).length > 0) {
    skipped++;
    return last;
  }
  if (config.videoOptimized) skipped = 0;
  else skipped = Number.MAX_SAFE_INTEGER;
  return new Promise(async (resolve) => {
    const resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
    const enhance = tf.mul(resize, [255.0]);
    tf.dispose(resize);

    let resT;

    if (!config.profile) {
      if (config.prototype.enabled) resT = await model.predict(enhance);
    } else {
      const profileT = config.prototype.enabled ? await tf.profile(() => model.predict(enhance)) : {};
      resT = profileT.result.clone();
      profileT.result.dispose();
      profile.run('prototype', profileT);
    }
    enhance.dispose();

    let obj = {};
    if (resT) {
      const data = resT.dataSync();
      obj = { data };
      tf.dispose(resT);
    }

    last = obj;
    resolve(obj);
  });
}
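For completeness, a hypothetical usage sketch of the template above (the file itself is dropped in 1.8): the config.prototype block and the input tensor are placeholders, the point is the load-once plus skip-frames caching contract it implements.

```ts
import * as prototype from './prototype'; // hypothetical path of the template above

async function runPrototype(input: any, config: any) {
  await prototype.load(config);                       // loads the graph model once and caches it
  const res = await prototype.predict(input, config);
  // with config.videoOptimized set, calls made within config.prototype.skipFrames frames
  // return the cached `last` result instead of re-running the model
  return res;
}
```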
File diff suppressed because one or more lines are too long
|
@ -118,7 +118,6 @@
|
|||
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#enhance" class="tsd-kind-icon">enhance</a></li>
|
||||
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#load" class="tsd-kind-icon">load</a></li>
|
||||
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#match" class="tsd-kind-icon">match</a></li>
|
||||
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#profiledata" class="tsd-kind-icon">profile<wbr>Data</a></li>
|
||||
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#similarity" class="tsd-kind-icon">similarity</a></li>
|
||||
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#warmup" class="tsd-kind-icon">warmup</a></li>
|
||||
</ul>
|
||||
|
@ -713,28 +712,6 @@
|
|||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class">
|
||||
<a name="profiledata" class="tsd-anchor"></a>
|
||||
<h3>profile<wbr>Data</h3>
|
||||
<ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class">
|
||||
<li class="tsd-signature tsd-kind-icon">profile<wbr>Data<span class="tsd-signature-symbol">(</span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{}</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-symbol">{ </span>largestKernelOps<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">; </span>newBytes<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">; </span>newTensors<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">; </span>numKernelOps<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">; </span>peakBytes<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">; </span>slowestKernelOps<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">; </span>timeKernelOps<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol"> }</span></li>
|
||||
</ul>
|
||||
<ul class="tsd-descriptions">
|
||||
<li class="tsd-description">
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
<div class="tsd-comment tsd-typography">
|
||||
<div class="lead">
|
||||
<p>Internal: ProfileData method returns last known profiling information</p>
|
||||
<ul>
|
||||
<li>Requires human.config.profile set to true</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<h4 class="tsd-returns-title">Returns <span class="tsd-signature-symbol">{}</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-symbol">{ </span>largestKernelOps<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">; </span>newBytes<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">; </span>newTensors<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">; </span>numKernelOps<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">; </span>peakBytes<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">; </span>slowestKernelOps<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">; </span>timeKernelOps<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol"> }</span></h4>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class">
|
||||
<a name="similarity" class="tsd-anchor"></a>
|
||||
<h3>similarity</h3>
|
||||
|
@ -866,9 +843,6 @@
|
|||
<li class=" tsd-kind-method tsd-parent-kind-class">
|
||||
<a href="human.html#match" class="tsd-kind-icon">match</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-method tsd-parent-kind-class">
|
||||
<a href="human.html#profiledata" class="tsd-kind-icon">profile<wbr>Data</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-method tsd-parent-kind-class">
|
||||
<a href="human.html#similarity" class="tsd-kind-icon">similarity</a>
|
||||
</li>
|
||||
|
|
|
@ -89,7 +89,6 @@
|
|||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#async" class="tsd-kind-icon">async</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#backend" class="tsd-kind-icon">backend</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#body" class="tsd-kind-icon">body</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#deallocate" class="tsd-kind-icon">deallocate</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#debug" class="tsd-kind-icon">debug</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#face" class="tsd-kind-icon">face</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#filter" class="tsd-kind-icon">filter</a></li>
|
||||
|
@ -97,8 +96,6 @@
|
|||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#hand" class="tsd-kind-icon">hand</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#modelbasepath" class="tsd-kind-icon">model<wbr>Base<wbr>Path</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#object" class="tsd-kind-icon">object</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#profile" class="tsd-kind-icon">profile</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#scoped" class="tsd-kind-icon">scoped</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#videooptimized" class="tsd-kind-icon">video<wbr>Optimized</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#warmup" class="tsd-kind-icon">warmup</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="config.html#wasmpath" class="tsd-kind-icon">wasm<wbr>Path</a></li>
|
||||
|
@ -136,7 +133,7 @@
|
|||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="body" class="tsd-anchor"></a>
|
||||
<h3>body</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">body<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>maxDetections<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>nmsRadius<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>scoreThreshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span></div>
|
||||
<div class="tsd-signature tsd-kind-icon">body<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>maxDetected<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>minConfidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol"> }</span></div>
|
||||
<aside class="tsd-sources">
</aside>
<div class="tsd-comment tsd-typography">

@ -144,10 +141,9 @@

<p>Controls and configures all body detection specific options</p>
<ul>
<li>enabled: true/false</li>
<li>modelPath: paths for both hand detector model and hand skeleton model</li>
<li>maxDetections: maximum number of people detected in the input, should be set to the minimum number for performance</li>
<li>scoreThreshold: threshold for deciding when to remove people based on score in non-maximum suppression</li>
<li>nmsRadius: threshold for deciding whether body parts overlap too much in non-maximum suppression</li>
<li>modelPath: body pose model, can be absolute path or relative to modelBasePath</li>
<li>minConfidence: threshold for discarding a prediction</li>
<li>maxDetected: maximum number of people detected in the input, should be set to the minimum number for performance</li>
</ul>
</div>
</div>
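To make the rename in the list above concrete, a hedged before/after sketch of the body block (values and the model file name are illustrative, not necessarily the shipped defaults):

```ts
// 1.7-style body configuration (illustrative values)
const bodyOld = { enabled: true, modelPath: 'posenet.json', maxDetections: 5, scoreThreshold: 0.3, nmsRadius: 20 };

// 1.8-style equivalent: scoreThreshold becomes minConfidence, maxDetections becomes maxDetected,
// and nmsRadius is no longer configurable -- the decoder uses a built-in default radius
const bodyNew = { enabled: true, modelPath: 'posenet.json', maxDetected: 5, minConfidence: 0.3 };
```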
@ -158,32 +154,17 @@
|
|||
<h5>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>max<wbr>Detections<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
<h5>max<wbr>Detected<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>min<wbr>Confidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>model<wbr>Path<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>nms<wbr>Radius<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>score<wbr>Threshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="deallocate" class="tsd-anchor"></a>
|
||||
<h3>deallocate</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">deallocate<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
<div class="tsd-comment tsd-typography">
|
||||
<div class="lead">
|
||||
<p>Internal: Use aggressive GPU memory deallocator when backend is set to <code>webgl</code> or <code>humangl</code></p>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="debug" class="tsd-anchor"></a>
|
||||
<h3>debug</h3>
|
||||
|
@ -199,7 +180,7 @@
|
|||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="face" class="tsd-anchor"></a>
|
||||
<h3>face</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">face<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>description<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>minConfidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>skipFrames<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>detector<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>iouThreshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>maxFaces<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>minConfidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>return<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>rotation<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>scoreThreshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>skipFrames<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>skipInitial<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>emotion<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>minConfidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>skipFrames<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>iris<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol"> }</span><span 
class="tsd-signature-symbol">; </span>mesh<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol"> }</span></div>
|
||||
<div class="tsd-signature tsd-kind-icon">face<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>description<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>minConfidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>skipFrames<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>detector<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>iouThreshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>maxDetected<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>minConfidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>return<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>rotation<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>skipFrames<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>skipInitial<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>emotion<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>minConfidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>skipFrames<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>iris<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>mesh<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>enabled<span 
class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol"> }</span></div>
|
||||
<aside class="tsd-sources">
</aside>
<div class="tsd-comment tsd-typography">

@ -209,15 +190,14 @@

<li>face detection, face mesh detection, age, gender, emotion detection and face description
Parameters:</li>
<li>enabled: true/false</li>
<li>modelPath: path for individual face model</li>
<li>modelPath: path for each of the face models</li>
<li>minConfidence: threshold for discarding a prediction</li>
<li>iouThreshold: amount of overlap between two detected objects before one object is removed</li>
<li>maxDetected: maximum number of faces detected in the input, should be set to the minimum number for performance</li>
<li>rotation: use calculated rotated face image or just box with rotation as-is, false means higher performance, but incorrect mesh mapping on higher face angles</li>
<li>maxFaces: maximum number of faces detected in the input, should be set to the minimum number for performance</li>
<li>skipFrames: how many frames to go without re-running the face detector and just run modified face mesh analysis, only valid if videoOptimized is set to true</li>
<li>skipInitial: if previous detection resulted in no faces detected, should skipFrames be reset immediately to force new detection cycle</li>
<li>minConfidence: threshold for discarding a prediction</li>
<li>iouThreshold: threshold for deciding whether boxes overlap too much in non-maximum suppression</li>
<li>scoreThreshold: threshold for deciding when to remove boxes based on score in non-maximum suppression</li>
<li>return extracted face as tensor for further user processing</li>
<li>return: return extracted face as tensor for further user processing</li>
</ul>
</div>
</div>
@ -242,13 +222,13 @@
|
|||
</ul>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>detector<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>iouThreshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>maxFaces<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>minConfidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>return<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>rotation<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>scoreThreshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>skipFrames<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>skipInitial<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol"> }</span></h5>
|
||||
<h5>detector<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>iouThreshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>maxDetected<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>minConfidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>return<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>rotation<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>skipFrames<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>skipInitial<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol"> }</span></h5>
|
||||
<ul class="tsd-parameters">
|
||||
<li class="tsd-parameter">
|
||||
<h5>iou<wbr>Threshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>max<wbr>Faces<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
<h5>max<wbr>Detected<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>min<wbr>Confidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
|
@ -262,9 +242,6 @@
|
|||
<li class="tsd-parameter">
|
||||
<h5>rotation<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>score<wbr>Threshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>skip<wbr>Frames<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
</li>
|
||||
|
@ -510,7 +487,7 @@
|
|||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="hand" class="tsd-anchor"></a>
|
||||
<h3>hand</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">hand<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>detector<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>iouThreshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>landmarks<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>maxHands<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>minConfidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>rotation<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>scoreThreshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>skeleton<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>skipFrames<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>skipInitial<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol"> }</span></div>
|
||||
<div class="tsd-signature tsd-kind-icon">hand<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>detector<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>iouThreshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>landmarks<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>maxDetected<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>minConfidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>rotation<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>skeleton<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>skipFrames<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>skipInitial<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol"> }</span></div>
|
||||
<aside class="tsd-sources">
</aside>
<div class="tsd-comment tsd-typography">

@ -518,15 +495,14 @@

<p>Controls and configures all hand detection specific options</p>
<ul>
<li>enabled: true/false</li>
<li>modelPath: paths for both hand detector model and hand skeleton model</li>
<li>landmarks: detect hand landmarks or just hand bounding box</li>
<li>modelPath: paths for hand detector and hand skeleton models, can be absolute path or relative to modelBasePath</li>
<li>minConfidence: threshold for discarding a prediction</li>
<li>iouThreshold: amount of overlap between two detected objects before one object is removed</li>
<li>maxDetected: maximum number of hands detected in the input, should be set to the minimum number for performance</li>
<li>rotation: use best-guess rotated hand image or just box with rotation as-is, false means higher performance, but incorrect finger mapping if hand is inverted</li>
<li>skipFrames: how many frames to go without re-running the hand bounding box detector and just run modified hand skeleton detector, only valid if videoOptimized is set to true</li>
<li>skipInitial: if previous detection resulted in no hands detected, should skipFrames be reset immediately to force new detection cycle</li>
<li>minConfidence: threshold for discarding a prediction</li>
<li>iouThreshold: threshold for deciding whether boxes overlap too much in non-maximum suppression</li>
<li>scoreThreshold: threshold for deciding when to remove boxes based on score in non-maximum suppression</li>
<li>maxHands: maximum number of hands detected in the input, should be set to the minimum number for performance</li>
<li>landmarks: detect hand landmarks or just hand bounding box</li>
</ul>
</div>
</div>
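A hedged sketch of a 1.8-style hand block matching the list above; the model file names are assumptions, while the structure (nested detector and skeleton model paths plus the unified maxDetected and minConfidence) follows the documented options:

```ts
const handConfig = {
  enabled: true,
  rotation: false,
  landmarks: true,                              // full skeleton vs. bounding box only
  maxDetected: 2,                               // replaces maxHands
  minConfidence: 0.1,                           // replaces scoreThreshold
  iouThreshold: 0.1,
  skipFrames: 12,
  skipInitial: false,
  detector: { modelPath: 'handdetect.json' },   // assumed file name
  skeleton: { modelPath: 'handskeleton.json' }, // assumed file name
};
```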
@ -551,7 +527,7 @@
|
|||
<h5>landmarks<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>max<wbr>Hands<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
<h5>max<wbr>Detected<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>min<wbr>Confidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
|
@ -559,9 +535,6 @@
|
|||
<li class="tsd-parameter">
|
||||
<h5>rotation<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>score<wbr>Threshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>skeleton<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol"> }</span></h5>
|
||||
<ul class="tsd-parameters">
|
||||
|
@ -589,7 +562,7 @@

<div class="lead">
<p>Base model path (typically starting with file://, http:// or https://) for all models</p>
<ul>
<li>individual modelPath values are joined to this path</li>
<li>individual modelPath values are relative to this path</li>
</ul>
</div>
</div>
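A small sketch of how the two values combine; the URLs and file names are examples only, the rule is the one stated above: relative modelPath values are resolved against modelBasePath, absolute ones are used as-is.

```ts
const config = {
  modelBasePath: 'https://example.com/human/models/',     // example base path
  face: { detector: { modelPath: 'blazeface.json' } },    // resolved against modelBasePath
  body: { modelPath: 'file:///opt/human/posenet.json' },  // absolute path, used as-is
};
```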
@ -597,16 +570,18 @@
|
|||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="object" class="tsd-anchor"></a>
|
||||
<h3>object</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">object<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>iouThreshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>maxResults<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>minConfidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>skipFrames<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span></div>
|
||||
<div class="tsd-signature tsd-kind-icon">object<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>enabled<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">boolean</span><span class="tsd-signature-symbol">; </span>iouThreshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>maxDetected<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>minConfidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>modelPath<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>skipFrames<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span></div>
|
||||
<aside class="tsd-sources">
</aside>
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>Controls and configures all object detection specific options</p>
<ul>
<li>enabled: true/false</li>
<li>modelPath: object detection model, can be absolute path or relative to modelBasePath</li>
<li>minConfidence: minimum score that detection must have to return as valid object</li>
<li>iouThreshold: amount of overlap between two detected objects before one object is removed</li>
<li>maxResults: maximum number of detections to return</li>
<li>maxDetected: maximum number of detections to return</li>
<li>skipFrames: run object detection every n input frames, only valid if videoOptimized is set to true</li>
</ul>
</div>
@ -621,7 +596,7 @@
|
|||
<h5>iou<wbr>Threshold<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>max<wbr>Results<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
<h5>max<wbr>Detected<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
</li>
|
||||
<li class="tsd-parameter">
|
||||
<h5>min<wbr>Confidence<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span></h5>
|
||||
|
@ -635,30 +610,6 @@
|
|||
</ul>
|
||||
</div>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="profile" class="tsd-anchor"></a>
|
||||
<h3>profile</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">profile<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
<div class="tsd-comment tsd-typography">
|
||||
<div class="lead">
|
||||
<p>Collect and print profiling data during inference operations</p>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="scoped" class="tsd-anchor"></a>
|
||||
<h3>scoped</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">scoped<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
<div class="tsd-comment tsd-typography">
|
||||
<div class="lead">
|
||||
<p>Internal: Run all inference operations in an explicit local scope run to avoid memory leaks</p>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="videooptimized" class="tsd-anchor"></a>
|
||||
<h3>video<wbr>Optimized</h3>
|
||||
|
@ -737,9 +688,6 @@
|
|||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="config.html#body" class="tsd-kind-icon">body</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="config.html#deallocate" class="tsd-kind-icon">deallocate</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="config.html#debug" class="tsd-kind-icon">debug</a>
|
||||
</li>
|
||||
|
@ -761,12 +709,6 @@
|
|||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="config.html#object" class="tsd-kind-icon">object</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="config.html#profile" class="tsd-kind-icon">profile</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="config.html#scoped" class="tsd-kind-icon">scoped</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="config.html#videooptimized" class="tsd-kind-icon">video<wbr>Optimized</a>
|
||||
</li>
|
||||
|
|
|
@ -12,12 +12,6 @@ export interface Config {

  debug: boolean;
  /** Perform model loading and inference concurrently or sequentially */
  async: boolean;
  /** Collect and print profiling data during inference operations */
  profile: boolean;
  /** Internal: Use aggressive GPU memory deallocator when backend is set to `webgl` or `humangl` */
  deallocate: boolean;
  /** Internal: Run all inference operations in an explicit local scope run to avoid memory leaks */
  scoped: boolean;
  /** Perform additional optimizations when input is video,
   * - must be disabled for images
   * - automatically disabled for Image, ImageData, ImageBitmap and Tensor inputs

@ -30,7 +24,7 @@ export interface Config {

  */
  warmup: 'none' | 'face' | 'full' | 'body';
  /** Base model path (typically starting with file://, http:// or https://) for all models
   * - individual modelPath values are joined to this path
   * - individual modelPath values are relative to this path
  */
  modelBasePath: string;
  /** Run input through image filters before inference
@ -89,27 +83,25 @@ export interface Config {

   * - face detection, face mesh detection, age, gender, emotion detection and face description
   * Parameters:
   * - enabled: true/false
   * - modelPath: path for individual face model
   * - modelPath: path for each of the face models
   * - minConfidence: threshold for discarding a prediction
   * - iouThreshold: amount of overlap between two detected objects before one object is removed
   * - maxDetected: maximum number of faces detected in the input, should be set to the minimum number for performance
   * - rotation: use calculated rotated face image or just box with rotation as-is, false means higher performance, but incorrect mesh mapping on higher face angles
   * - maxFaces: maximum number of faces detected in the input, should be set to the minimum number for performance
   * - skipFrames: how many frames to go without re-running the face detector and just run modified face mesh analysis, only valid if videoOptimized is set to true
   * - skipInitial: if previous detection resulted in no faces detected, should skipFrames be reset immediately to force new detection cycle
   * - minConfidence: threshold for discarding a prediction
   * - iouThreshold: threshold for deciding whether boxes overlap too much in non-maximum suppression
   * - scoreThreshold: threshold for deciding when to remove boxes based on score in non-maximum suppression
   * - return extracted face as tensor for further user processing
   * - return: return extracted face as tensor for further user processing
  */
  face: {
    enabled: boolean;
    detector: {
      modelPath: string;
      rotation: boolean;
      maxFaces: number;
      maxDetected: number;
      skipFrames: number;
      skipInitial: boolean;
      minConfidence: number;
      iouThreshold: number;
      scoreThreshold: number;
      return: boolean;
    };
    mesh: {
@ -135,29 +127,26 @@ export interface Config {

  };
  /** Controls and configures all body detection specific options
   * - enabled: true/false
   * - modelPath: paths for both hand detector model and hand skeleton model
   * - maxDetections: maximum number of people detected in the input, should be set to the minimum number for performance
   * - scoreThreshold: threshold for deciding when to remove people based on score in non-maximum suppression
   * - nmsRadius: threshold for deciding whether body parts overlap too much in non-maximum suppression
   * - modelPath: body pose model, can be absolute path or relative to modelBasePath
   * - minConfidence: threshold for discarding a prediction
   * - maxDetected: maximum number of people detected in the input, should be set to the minimum number for performance
  */
  body: {
    enabled: boolean;
    modelPath: string;
    maxDetections: number;
    scoreThreshold: number;
    nmsRadius: number;
    maxDetected: number;
    minConfidence: number;
  };
  /** Controls and configures all hand detection specific options
   * - enabled: true/false
   * - modelPath: paths for both hand detector model and hand skeleton model
   * - landmarks: detect hand landmarks or just hand bounding box
   * - modelPath: paths for hand detector and hand skeleton models, can be absolute path or relative to modelBasePath
   * - minConfidence: threshold for discarding a prediction
   * - iouThreshold: amount of overlap between two detected objects before one object is removed
   * - maxDetected: maximum number of hands detected in the input, should be set to the minimum number for performance
   * - rotation: use best-guess rotated hand image or just box with rotation as-is, false means higher performance, but incorrect finger mapping if hand is inverted
   * - skipFrames: how many frames to go without re-running the hand bounding box detector and just run modified hand skeleton detector, only valid if videoOptimized is set to true
   * - skipInitial: if previous detection resulted in no hands detected, should skipFrames be reset immediately to force new detection cycle
   * - minConfidence: threshold for discarding a prediction
   * - iouThreshold: threshold for deciding whether boxes overlap too much in non-maximum suppression
   * - scoreThreshold: threshold for deciding when to remove boxes based on score in non-maximum suppression
   * - maxHands: maximum number of hands detected in the input, should be set to the minimum number for performance
   * - landmarks: detect hand landmarks or just hand bounding box
  */
  hand: {
    enabled: boolean;

@ -166,8 +155,7 @@ export interface Config {

    skipInitial: boolean;
    minConfidence: number;
    iouThreshold: number;
    scoreThreshold: number;
    maxHands: number;
    maxDetected: number;
    landmarks: boolean;
    detector: {
      modelPath: string;
@ -177,9 +165,11 @@ export interface Config {

    };
  };
  /** Controls and configures all object detection specific options
   * - enabled: true/false
   * - modelPath: object detection model, can be absolute path or relative to modelBasePath
   * - minConfidence: minimum score that detection must have to return as valid object
   * - iouThreshold: amount of overlap between two detected objects before one object is removed
   * - maxResults: maximum number of detections to return
   * - maxDetected: maximum number of detections to return
   * - skipFrames: run object detection every n input frames, only valid if videoOptimized is set to true
  */
  object: {

@ -187,7 +177,7 @@ export interface Config {

    modelPath: string;
    minConfidence: number;
    iouThreshold: number;
    maxResults: number;
    maxDetected: number;
    skipFrames: number;
  };
}
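Putting the interface changes together, a hedged migration sketch for 1.8: the constructor accepts a partial configuration object (per the class typings that follow), so only the renamed options need to be overridden; the values below are illustrative and detect() is shown as the typical entry point.

```ts
import Human from '@vladmandic/human';

// partial user configuration using the unified 1.8 names; omitted options keep library defaults
const userConfig = {
  backend: 'webgl',
  modelBasePath: '../models/',                                               // example path
  face: { enabled: true, detector: { maxDetected: 5, minConfidence: 0.2 } }, // was maxFaces / scoreThreshold
  body: { enabled: true, maxDetected: 1, minConfidence: 0.2 },               // was maxDetections / scoreThreshold / nmsRadius
  hand: { enabled: true, maxDetected: 2 },                                   // was maxHands
  object: { enabled: false },
};

async function main(input: HTMLVideoElement) {
  const human = new Human(userConfig);
  const result = await human.detect(input);
  return result;
}
```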
@ -111,18 +111,6 @@ export declare class Human {

   * - @param userConfig: {@link Config}
  */
  constructor(userConfig?: Config | Object);
  /** Internal: ProfileData method returns last known profiling information
   * - Requires human.config.profile set to true
  */
  profileData(): {
    newBytes: any;
    newTensors: any;
    peakBytes: any;
    numKernelOps: any;
    timeKernelOps: any;
    slowestKernelOps: any;
    largestKernelOps: any;
  } | {};
  /** @hidden */
  analyze: (...msg: any[]) => void;
  /** Similarity method calculates similarity between two provided face descriptors (face embeddings)
@ -1,7 +1,7 @@

import * as utils from './utils';
export declare function decodePose(root: any, scores: any, offsets: any, outputStride: any, displacementsFwd: any, displacementsBwd: any): any[];
export declare function buildPartWithScoreQueue(scoreThreshold: any, scores: any): utils.MaxHeap;
export declare function decode(offsetsBuffer: any, scoresBuffer: any, displacementsFwdBuffer: any, displacementsBwdBuffer: any, nmsRadius: any, maxDetections: any, scoreThreshold: any): {
export declare function buildPartWithScoreQueue(minConfidence: any, scores: any): utils.MaxHeap;
export declare function decode(offsetsBuffer: any, scoresBuffer: any, displacementsFwdBuffer: any, displacementsBwdBuffer: any, maxDetected: any, minConfidence: any): {
  keypoints: any;
  box: any;
  score: number;
@ -1,2 +0,0 @@

export declare const data: {};
export declare function run(modelName: string, profileData: any): void;

wiki

@ -1 +1 @@

Subproject commit 3b81af15f2560de5c06f20cbd8de57caf62682f2
Subproject commit 906244487754b61fd24f49fe2db91ea68264137d