mirror of https://github.com/vladmandic/human

improved caching and warmup
commit cebd075047, parent 4cf71ea08e

config.js (12 lines changed)
@@ -26,7 +26,9 @@ export default {
 // must be disabled for images
 // basically this skips object box boundary detection for every n frames
 // while maintaining in-box detection since objects cannot move that fast
+warmup: 'full', // what to use for human.warmup(), can be 'none', 'face', 'full'
+// warmup pre-initializes all models for faster inference but can take
+// significant time on startup
 filter: {
 enabled: true, // enable image pre-processing filters
 width: 0, // resize input width
@@ -69,7 +71,7 @@ export default {
 // false means higher performance, but incorrect mesh mapping if face angle is above 20 degrees
 maxFaces: 10, // maximum number of faces detected in the input
 // should be set to the minimum number for performance
-skipFrames: 20, // how many frames to go without re-running the face bounding box detector
+skipFrames: 11, // how many frames to go without re-running the face bounding box detector
 // only used for video inputs
 // e.g., if model is running st 25 FPS, we can re-use existing bounding
 // box for updated face analysis as the head probably hasn't moved much
@@ -99,7 +101,7 @@ export default {
 modelPath: '../models/age-ssrnet-imdb.json', // can be 'age-ssrnet-imdb' or 'age-ssrnet-wiki'
 // which determines training set for model
 inputSize: 64, // fixed value
-skipFrames: 41, // how many frames to go without re-running the detector
+skipFrames: 31, // how many frames to go without re-running the detector
 // only used for video inputs
 },
@@ -108,7 +110,7 @@ export default {
 minConfidence: 0.1, // threshold for discarding a prediction
 modelPath: '../models/gender-ssrnet-imdb.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'
 inputSize: 64, // fixed value
-skipFrames: 42, // how many frames to go without re-running the detector
+skipFrames: 41, // how many frames to go without re-running the detector
 // only used for video inputs
 },
@@ -143,7 +145,7 @@ export default {
 rotation: false, // use best-guess rotated hand image or just box with rotation as-is
 // false means higher performance, but incorrect finger mapping if hand is inverted
 inputSize: 256, // fixed value
-skipFrames: 19, // how many frames to go without re-running the hand bounding box detector
+skipFrames: 12, // how many frames to go without re-running the hand bounding box detector
 // only used for video inputs
 // e.g., if model is running st 25 FPS, we can re-use existing bounding
 // box for updated hand skeleton analysis as the hand probably
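The new warmup option and the lowered skipFrames defaults can also be overridden per application instead of editing config.js. Below is a minimal sketch of such an override, assuming the demo-style setup (a Human instance built from the ESM bundle); the import path and constructor call are assumptions, while warmup, videoOptimized and the skipFrames keys come from the config shown above.

// sketch: override warmup and caching behaviour through a user config object
import Human from './dist/human.esm.js'; // path assumption; use whatever bundle your app loads

const human = new Human();

const userConfig = {
  warmup: 'face',            // 'none', 'face' or 'full'
  videoOptimized: true,      // enables the frame-skipping caches tuned above
  face: { detector: { skipFrames: 11 } },
  hand: { skipFrames: 12 },
};

async function run(input) {
  await human.load(userConfig);   // optional: pre-loads all enabled models
  await human.warmup(userConfig); // optional: pre-warms models for faster first inference
  return human.detect(input, userConfig);
}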
@@ -37,7 +37,6 @@ const ui = {
 console: true,
 maxFPSframes: 10,
 modelsPreload: true,
-modelsWarmup: true,
 menuWidth: 0,
 menuHeight: 0,
 camera: {},
@@ -518,7 +517,7 @@ async function main() {
 status('loading');
 await human.load(userConfig); // this is not required, just pre-loads all models
 }
-if (ui.modelsWarmup && !ui.useWorker) {
+if (!ui.useWorker) {
 status('initializing');
 await human.warmup(userConfig); // this is not required, just pre-warms all models for faster initial inference
 }
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
 "inputs": {
 "dist/human.esm.js": {
-"bytes": 1786155,
+"bytes": 1830186,
 "imports": []
 },
 "demo/draw.js": {
@@ -17,7 +17,7 @@
 "imports": []
 },
 "demo/browser.js": {
-"bytes": 25469,
+"bytes": 25428,
 "imports": [
 {
 "path": "dist/human.esm.js"
@@ -38,14 +38,14 @@
 "dist/demo-browser-index.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 2747800
+"bytes": 2749760
 },
 "dist/demo-browser-index.js": {
 "imports": [],
 "exports": [],
 "inputs": {
 "dist/human.esm.js": {
-"bytesInOutput": 1778915
+"bytesInOutput": 1822946
 },
 "demo/draw.js": {
 "bytesInOutput": 7816
@@ -57,10 +57,10 @@
 "bytesInOutput": 7382
 },
 "demo/browser.js": {
-"bytesInOutput": 19563
+"bytesInOutput": 19529
 }
 },
-"bytes": 1832865
+"bytes": 1876862
 }
 }
 }
File diff suppressed because one or more lines are too long
@@ -36,7 +36,7 @@
 "imports": []
 },
 "src/face/facepipeline.js": {
-"bytes": 13856,
+"bytes": 13836,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -84,7 +84,7 @@
 ]
 },
 "src/age/age.js": {
-"bytes": 1943,
+"bytes": 2017,
 "imports": [
 {
 "path": "src/log.js"
@@ -98,7 +98,7 @@
 ]
 },
 "src/gender/gender.js": {
-"bytes": 3384,
+"bytes": 2886,
 "imports": [
 {
 "path": "src/log.js"
@@ -112,7 +112,7 @@
 ]
 },
 "src/emotion/emotion.js": {
-"bytes": 2981,
+"bytes": 3055,
 "imports": [
 {
 "path": "src/log.js"
@@ -264,7 +264,7 @@
 ]
 },
 "src/hand/handdetector.js": {
-"bytes": 4252,
+"bytes": 4253,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -279,7 +279,7 @@
 "imports": []
 },
 "src/hand/handpipeline.js": {
-"bytes": 7923,
+"bytes": 7886,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -342,7 +342,7 @@
 ]
 },
 "config.js": {
-"bytes": 8990,
+"bytes": 9243,
 "imports": []
 },
 "src/sample.js": {
@@ -354,7 +354,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 16524,
+"bytes": 16894,
 "imports": [
 {
 "path": "src/log.js"
@@ -408,7 +408,7 @@
 "dist/human.esm.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 2652161
+"bytes": 2656485
 },
 "dist/human.esm.js": {
 "imports": [],
@@ -429,7 +429,7 @@
 "bytesInOutput": 30817
 },
 "src/face/facepipeline.js": {
-"bytesInOutput": 9323
+"bytesInOutput": 9314
 },
 "src/face/facemesh.js": {
 "bytesInOutput": 2182
@@ -438,13 +438,13 @@
 "bytesInOutput": 846
 },
 "src/age/age.js": {
-"bytesInOutput": 1112
+"bytesInOutput": 1169
 },
 "src/gender/gender.js": {
-"bytesInOutput": 1858
+"bytesInOutput": 1915
 },
 "src/emotion/emotion.js": {
-"bytesInOutput": 1738
+"bytesInOutput": 1795
 },
 "src/embedding/embedding.js": {
 "bytesInOutput": 1310
@@ -483,10 +483,10 @@
 "bytesInOutput": 832
 },
 "src/hand/handdetector.js": {
-"bytesInOutput": 2730
+"bytesInOutput": 2731
 },
 "src/hand/handpipeline.js": {
-"bytesInOutput": 4456
+"bytesInOutput": 4447
 },
 "src/hand/anchors.js": {
 "bytesInOutput": 127032
@@ -510,7 +510,7 @@
 "bytesInOutput": 1520210
 },
 "src/human.js": {
-"bytesInOutput": 10905
+"bytesInOutput": 11073
 },
 "src/hand/box.js": {
 "bytesInOutput": 1473
@@ -519,16 +519,16 @@
 "bytesInOutput": 1796
 },
 "config.js": {
-"bytesInOutput": 1440
+"bytesInOutput": 1454
 },
 "src/sample.js": {
-"bytesInOutput": 11646
+"bytesInOutput": 55341
 },
 "package.json": {
 "bytesInOutput": 21
 }
 },
-"bytes": 1786155
+"bytes": 1830186
 }
 }
 }
File diff suppressed because one or more lines are too long
@@ -36,7 +36,7 @@
 "imports": []
 },
 "src/face/facepipeline.js": {
-"bytes": 13856,
+"bytes": 13836,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -84,7 +84,7 @@
 ]
 },
 "src/age/age.js": {
-"bytes": 1943,
+"bytes": 2017,
 "imports": [
 {
 "path": "src/log.js"
@@ -98,7 +98,7 @@
 ]
 },
 "src/gender/gender.js": {
-"bytes": 3384,
+"bytes": 2886,
 "imports": [
 {
 "path": "src/log.js"
@@ -112,7 +112,7 @@
 ]
 },
 "src/emotion/emotion.js": {
-"bytes": 2981,
+"bytes": 3055,
 "imports": [
 {
 "path": "src/log.js"
@@ -264,7 +264,7 @@
 ]
 },
 "src/hand/handdetector.js": {
-"bytes": 4252,
+"bytes": 4253,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -279,7 +279,7 @@
 "imports": []
 },
 "src/hand/handpipeline.js": {
-"bytes": 7923,
+"bytes": 7886,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -342,7 +342,7 @@
 ]
 },
 "config.js": {
-"bytes": 8990,
+"bytes": 9243,
 "imports": []
 },
 "src/sample.js": {
@@ -354,7 +354,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 16524,
+"bytes": 16894,
 "imports": [
 {
 "path": "src/log.js"
@@ -408,7 +408,7 @@
 "dist/human.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 2669160
+"bytes": 2673870
 },
 "dist/human.js": {
 "imports": [],
@@ -427,7 +427,7 @@
 "bytesInOutput": 30817
 },
 "src/face/facepipeline.js": {
-"bytesInOutput": 9323
+"bytesInOutput": 9314
 },
 "src/face/facemesh.js": {
 "bytesInOutput": 2182
@@ -436,13 +436,13 @@
 "bytesInOutput": 846
 },
 "src/age/age.js": {
-"bytesInOutput": 1112
+"bytesInOutput": 1169
 },
 "src/gender/gender.js": {
-"bytesInOutput": 1858
+"bytesInOutput": 1915
 },
 "src/emotion/emotion.js": {
-"bytesInOutput": 1738
+"bytesInOutput": 1795
 },
 "src/embedding/embedding.js": {
 "bytesInOutput": 1310
@@ -481,10 +481,10 @@
 "bytesInOutput": 832
 },
 "src/hand/handdetector.js": {
-"bytesInOutput": 2730
+"bytesInOutput": 2731
 },
 "src/hand/handpipeline.js": {
-"bytesInOutput": 4456
+"bytesInOutput": 4447
 },
 "src/hand/anchors.js": {
 "bytesInOutput": 127032
@@ -502,7 +502,7 @@
 "bytesInOutput": 4008
 },
 "src/human.js": {
-"bytesInOutput": 10966
+"bytesInOutput": 11134
 },
 "src/log.js": {
 "bytesInOutput": 266
@@ -517,16 +517,16 @@
 "bytesInOutput": 1796
 },
 "config.js": {
-"bytesInOutput": 1440
+"bytesInOutput": 1454
 },
 "src/sample.js": {
-"bytesInOutput": 11646
+"bytesInOutput": 55341
 },
 "package.json": {
 "bytesInOutput": 21
 }
 },
-"bytes": 1786229
+"bytes": 1830260
 }
 }
 }
@@ -4,7 +4,7 @@ import * as profile from '../profile.js';

 const models = {};
 let last = { age: 0 };
-let frame = Number.MAX_SAFE_INTEGER;
+let skipped = Number.MAX_SAFE_INTEGER;

 async function load(config) {
 if (!models.age) {
@@ -16,11 +16,12 @@ async function load(config) {

 async function predict(image, config) {
 if (!models.age) return null;
-if ((frame < config.face.age.skipFrames) && config.videoOptimized && last.age && (last.age > 0)) {
-frame += 1;
+if ((skipped < config.face.age.skipFrames) && config.videoOptimized && last.age && (last.age > 0)) {
+skipped++;
 return last;
 }
-frame = 0;
+if (config.videoOptimized) skipped = 0;
+else skipped = Number.MAX_SAFE_INTEGER;
 return new Promise(async (resolve) => {
 /*
 const zoom = [0, 0]; // 0..1 meaning 0%..100%
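The age, gender and emotion modules (this hunk and the two below) now share the same caching pattern: a skipped counter guards a cached last result, and the counter is only reset when videoOptimized is set, so still images never receive a stale prediction. A condensed, self-contained sketch of that pattern follows; the names mirror age.js above, and runAgeModel is a hypothetical stand-in for the actual TFJS inference call.

let last = { age: 0 };
let skipped = Number.MAX_SAFE_INTEGER;

// hypothetical stand-in for the actual model invocation done in age.js
async function runAgeModel(image, config) {
  return { age: 25 };
}

async function predict(image, config) {
  // video inputs: serve the cached result while still within the skipFrames budget
  if ((skipped < config.face.age.skipFrames) && config.videoOptimized && last.age && (last.age > 0)) {
    skipped++;
    return last;
  }
  // video inputs restart the countdown; still images force a fresh run on every call
  if (config.videoOptimized) skipped = 0;
  else skipped = Number.MAX_SAFE_INTEGER;
  last = await runAgeModel(image, config);
  return last;
}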
@@ -5,7 +5,7 @@ import * as profile from '../profile.js';
 const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surpise', 'neutral'];
 const models = {};
 let last = [];
-let frame = Number.MAX_SAFE_INTEGER;
+let skipped = Number.MAX_SAFE_INTEGER;

 // tuning values
 const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale
@@ -21,11 +21,12 @@ async function load(config) {

 async function predict(image, config) {
 if (!models.emotion) return null;
-if ((frame < config.face.emotion.skipFrames) && config.videoOptimized && (last.length > 0)) {
-frame += 1;
+if ((skipped < config.face.emotion.skipFrames) && config.videoOptimized && (last.length > 0)) {
+skipped++;
 return last;
 }
-frame = 0;
+if (config.videoOptimized) skipped = 0;
+else skipped = Number.MAX_SAFE_INTEGER;
 return new Promise(async (resolve) => {
 /*
 const zoom = [0, 0]; // 0..1 meaning 0%..100%
@@ -51,7 +51,7 @@ class Pipeline {
 this.meshHeight = config.face.mesh.inputSize;
 this.irisSize = config.face.iris.inputSize;
 this.irisEnlarge = 2.3;
-this.skipped = 1000;
+this.skipped = 0;
 this.detectedFaces = 0;
 }

@@ -129,15 +129,14 @@ class Pipeline {
 }

 async predict(input, config) {
-this.skipped++;
 let useFreshBox = false;
 // run new detector every skipFrames unless we only want box to start with
 let detector;
-if ((this.skipped > config.face.detector.skipFrames) || !config.face.mesh.enabled || !config.videoOptimized) {
+if ((this.skipped === 0) || (this.skipped > config.face.detector.skipFrames) || !config.face.mesh.enabled || !config.videoOptimized) {
 detector = await this.boundingBoxDetector.getBoundingBoxes(input);
-// don't reset on test image
-if ((input.shape[1] !== 255) && (input.shape[2] !== 255)) this.skipped = 0;
+this.skipped = 0;
 }
+if (config.videoOptimized) this.skipped++;

 // if detector result count doesn't match current working set, use it to reset current working set
 if (detector && detector.boxes && (detector.boxes.length > 0) && (!config.face.mesh.enabled || (detector.boxes.length !== this.detectedFaces) && (this.detectedFaces !== config.face.detector.maxFaces))) {
@@ -172,7 +171,7 @@ class Pipeline {
 });
 }

-// log(this.skipped, config.face.detector.skipFrames, this.detectedFaces, config.face.detector.maxFaces, detector?.boxes.length, this.storedBoxes.length);
+// log('face', `skipped: ${this.skipped} max: ${config.face.detector.maxFaces} detected: ${this.detectedFaces} stored: ${this.storedBoxes.length} new: ${detector?.boxes?.length}`);
 let results = tf.tidy(() => this.storedBoxes.map((box, i) => {
 // The facial bounding box landmarks could come either from blazeface (if we are using a fresh box), or from the mesh model (if we are reusing an old box).
 let face;
@@ -207,7 +206,6 @@ class Pipeline {
 const [, confidence, contourCoords] = this.meshDetector.predict(face); // The first returned tensor represents facial contours, which are included in the coordinates.
 const confidenceVal = confidence.dataSync()[0];
 if (confidenceVal < config.face.detector.minConfidence) return null; // if below confidence just exit

 const coordsReshaped = tf.reshape(contourCoords, [-1, 3]);
 let rawCoords = coordsReshaped.arraySync();
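The face pipeline above and the hand pipeline below apply the same idea one level up: the expensive bounding-box detector runs on the very first frame (skipped === 0), whenever the skipFrames budget is exhausted, and always when mesh/landmark refinement or video optimization is off; the counter only advances for video inputs. A small sketch of that gating rule, with the usage shown as comments using the facepipeline names from the hunk above:

// sketch: the detector-gating rule shared by the face and hand pipelines
function shouldRunDetector(skipped, skipFrames, refinementEnabled, videoOptimized) {
  return (skipped === 0) || (skipped > skipFrames) || !refinementEnabled || !videoOptimized;
}

// usage inside the pipeline (names follow facepipeline.js):
// if (shouldRunDetector(this.skipped, config.face.detector.skipFrames, config.face.mesh.enabled, config.videoOptimized)) {
//   detector = await this.boundingBoxDetector.getBoundingBoxes(input);
//   this.skipped = 0;                        // restart the countdown after a full detection
// }
// if (config.videoOptimized) this.skipped++; // only video inputs consume the budget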
@@ -4,7 +4,7 @@ import * as profile from '../profile.js';

 const models = {};
 let last = { gender: '' };
-let frame = Number.MAX_SAFE_INTEGER;
+let skipped = Number.MAX_SAFE_INTEGER;
 let alternative = false;

 // tuning values
@@ -21,22 +21,13 @@ async function load(config) {

 async function predict(image, config) {
 if (!models.gender) return null;
-if ((frame < config.face.gender.skipFrames) && config.videoOptimized && last.gender !== '') {
-frame += 1;
+if ((skipped < config.face.gender.skipFrames) && config.videoOptimized && last.gender !== '') {
+skipped++;
 return last;
 }
-frame = 0;
+if (config.videoOptimized) skipped = 0;
+else skipped = Number.MAX_SAFE_INTEGER;
 return new Promise(async (resolve) => {
-/*
-const zoom = [0, 0]; // 0..1 meaning 0%..100%
-const box = [[
-(image.shape[1] * zoom[0]) / image.shape[1],
-(image.shape[2] * zoom[1]) / image.shape[2],
-(image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],
-(image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],
-]];
-const resize = tf.image.cropAndResize(image, box, [0], [config.face.gender.inputSize, config.face.gender.inputSize]);
-*/
 const resize = tf.image.resizeBilinear(image, [config.face.gender.inputSize, config.face.gender.inputSize], false);
 let enhance;
 if (alternative) {
@@ -51,7 +42,6 @@ async function predict(image, config) {
 } else {
 enhance = tf.mul(resize, [255.0]);
 }
-// const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);
 tf.dispose(resize);

 let genderT;
@@ -81,8 +81,8 @@ class HandDetector {
 const image = tf.tidy(() => input.resizeBilinear([config.hand.inputSize, config.hand.inputSize]).div(127.5).sub(1));
 const predictions = await this.getBoxes(image, config);
 image.dispose();
-if (!predictions || predictions.length === 0) return null;
 const hands = [];
+if (!predictions || predictions.length === 0) return hands;
 for (const prediction of predictions) {
 const boxes = prediction.box.dataSync();
 const startPoint = boxes.slice(0, 2);
@@ -35,7 +35,7 @@ class HandPipeline {
 this.landmarkDetector = landmarkDetector;
 this.inputSize = inputSize;
 this.storedBoxes = [];
-this.skipped = 1000;
+this.skipped = 0;
 this.detectedHands = 0;
 }

@@ -84,16 +84,15 @@ class HandPipeline {
 }

 async estimateHands(image, config) {
-this.skipped++;
 let useFreshBox = false;

 // run new detector every skipFrames unless we only want box to start with
 let boxes;
-if ((this.skipped > config.hand.skipFrames) || !config.hand.landmarks || !config.videoOptimized) {
+if ((this.skipped === 0) || (this.skipped > config.hand.skipFrames) || !config.hand.landmarks || !config.videoOptimized) {
 boxes = await this.handDetector.estimateHandBounds(image, config);
-// don't reset on test image
-if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;
+this.skipped = 0;
 }
+if (config.videoOptimized) this.skipped++;

 // if detector result count doesn't match current working set, use it to reset current working set
 if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.hand.maxHands) || !config.hand.landmarks)) {
@@ -103,7 +102,7 @@ class HandPipeline {
 if (this.storedBoxes.length > 0) useFreshBox = true;
 }
 const hands = [];
-// log(`skipped: ${this.skipped} max: ${config.hand.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);
+// log('hand', `skipped: ${this.skipped} max: ${config.hand.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);

 // go through working set of boxes
 for (let i = 0; i < this.storedBoxes.length; i++) {
src/human.js (36 lines changed)
@@ -418,26 +418,42 @@ class Human {

 async warmup(userConfig) {
 if (userConfig) this.config = mergeDeep(this.config, userConfig);
-const width = 256;
-const height = 256;
+return new Promise((resolve) => {
 const video = this.config.videoOptimized;
 this.config.videoOptimized = false;
-return new Promise((resolve) => {
-const img = new Image(width, height);
+let src;
+let size;
+switch (this.config.warmup) {
+case 'face':
+size = 256;
+src = sample.face;
+break;
+case 'full':
+size = 1200;
+src = sample.body;
+break;
+default:
+size = 0;
+src = null;
+}
+const img = new Image(size, size);
 img.onload = () => {
-const canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(width, height) : document.createElement('canvas');
-canvas.width = width;
-canvas.height = height;
+const canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(size, size) : document.createElement('canvas');
+canvas.width = size;
+canvas.height = size;
 const ctx = canvas.getContext('2d');
 ctx.drawImage(img, 0, 0);
-const data = ctx.getImageData(0, 0, width, height);
+const data = ctx.getImageData(0, 0, size, size);
+const t0 = now();
 this.detect(data, config).then((warmup) => {
-log('Warmup', warmup);
+const t1 = now();
+log('Warmup', this.config.warmup, (t1 - t0), warmup);
 this.config.videoOptimized = video;
 resolve(warmup);
 });
 };
-img.src = sample.face;
+if (src) img.src = src;
+else resolve(null);
 });
 }
 }
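The rewritten warmup() picks its sample image and canvas size from config.warmup ('face' uses the 256px face sample, 'full' the 1200px body sample, anything else resolves to null without running inference) and logs the elapsed time via the added t0/t1 pair. A short usage sketch; the import path and constructor call are assumptions based on the demo above, while warmup() and its option values come straight from this commit:

import Human from './dist/human.esm.js'; // path assumption; adjust to your bundle location

const human = new Human();

async function prewarm() {
  // runs one full detection pass on the bundled body sample so later calls start fast
  const result = await human.warmup({ warmup: 'full' });
  if (result) console.log('warmup finished', result);
  else console.log('warmup skipped'); // warmup() resolves to null when config.warmup is 'none'
}

prewarm();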
wiki (2 lines changed)
@@ -1 +1 @@
-Subproject commit 785bde4caa1a29d8bfe82a4ae987ffde1d9a0a73
+Subproject commit c4c8b30f6bf211ee267cf1884aaff9725f594631