mirror of https://github.com/vladmandic/human

reimplemented blazeface processing

parent 1cad163327
commit a47e45b855
@@ -71,10 +71,10 @@ export default {
     // e.g., if model is running at 25 FPS, we can re-use existing bounding
     // box for updated face analysis as the head probably hasn't moved much
     // in short time (10 * 1/25 = 0.25 sec)
-    minConfidence: 0.1, // threshold for discarding a prediction
-    iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much in
+    minConfidence: 0.5, // threshold for discarding a prediction
+    iouThreshold: 0.2, // threshold for deciding whether boxes overlap too much in
     // non-maximum suppression (0.1 means drop if overlap 10%)
-    scoreThreshold: 0.2, // threshold for deciding when to remove boxes based on score
+    scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on score
     // in non-maximum suppression,
     // this is applied on detection objects only and before minConfidence
   },
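Note: minConfidence, iouThreshold and scoreThreshold all feed non-maximum suppression, so the stricter defaults above directly reduce duplicate boxes. As an illustration of what iouThreshold: 0.2 means (a sketch, not code from this commit; boxes given as { x1, y1, x2, y2 } are an assumed shape for the example):

    // illustrative IoU helper, as used conceptually by non-maximum suppression
    function iou(a, b) {
      const interW = Math.max(0, Math.min(a.x2, b.x2) - Math.max(a.x1, b.x1));
      const interH = Math.max(0, Math.min(a.y2, b.y2) - Math.max(a.y1, b.y1));
      const inter = interW * interH; // overlap area
      const union = (a.x2 - a.x1) * (a.y2 - a.y1) + (b.x2 - b.x1) * (b.y2 - b.y1) - inter;
      return inter / union;
    }
    // with iouThreshold: 0.2, a candidate whose IoU against a higher-scoring,
    // already-accepted box exceeds 0.2 is suppressed as a duplicate detection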
@@ -393,8 +393,7 @@ async function main() {
   // this is not required, just pre-warms all models for faster initial inference
   if (ui.modelsWarmup) {
     status('initializing');
-    const warmup = new ImageData(256, 256);
-    await human.detect(warmup, userConfig);
+    await human.warmup(userConfig);
   }
   status('human: ready');
   document.getElementById('loader').style.display = 'none';
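Note: the demo no longer constructs its own blank ImageData; pre-warming moved into the library as the new Human.warmup() helper added in src/human.js further below. The replacement call is equivalent to:

    // what the demo used to do by hand, now wrapped by the library
    await human.warmup(userConfig); // internally runs detect() on a blank 255x255 ImageData, then logs 'warmed up'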
@@ -19,8 +19,8 @@
   <!-- <script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-webgpu@0.0.1-alpha.0/dist/tf-webgpu.js"></script> -->
   <!-- load compiled demo js -->
   <script src="../dist/demo-browser-index.js"></script>
-  <!-- alternatively load demo sources directly -->
-  <!-- <script src="browser.js" type="module"></script> -->
+  <!-- alternatively load demo sources directly; this is not supported on mobile platforms as they don't support type=module -->
+  <!-- <script src="./browser.js" type="module"></script> -->
   <style>
     @font-face { font-family: 'Lato'; font-display: swap; font-style: normal; font-weight: 400; src: local('Lato'), url('../assets/lato.ttf') format('truetype'); }
     html { font-family: 'Lato', 'Segoe UI'; font-size: 16px; font-variant: small-caps; }
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
   "inputs": {
     "demo/browser.js": {
-      "bytes": 18563,
+      "bytes": 18511,
       "imports": [
         {
           "path": "dist/human.esm.js"
@@ -23,7 +23,7 @@
       "imports": []
     },
     "dist/human.esm.js": {
-      "bytes": 1278633,
+      "bytes": 1278581,
       "imports": []
     }
   },
@@ -31,13 +31,13 @@
     "dist/demo-browser-index.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 5535482
+      "bytes": 5534501
     },
     "dist/demo-browser-index.js": {
       "imports": [],
       "inputs": {
         "dist/human.esm.js": {
-          "bytesInOutput": 1665139
+          "bytesInOutput": 1665015
         },
         "dist/human.esm.js": {
           "bytesInOutput": 8716
@@ -49,10 +49,10 @@
           "bytesInOutput": 13425
         },
         "demo/browser.js": {
-          "bytesInOutput": 16209
+          "bytesInOutput": 16157
         }
       },
-      "bytes": 1712507
+      "bytes": 1712331
     }
   }
 }
File diff suppressed because one or more lines are too long
@@ -128,11 +128,11 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 7058,
+      "bytes": 6937,
       "imports": []
     },
     "src/face/box.js": {
-      "bytes": 1924,
+      "bytes": 1929,
       "imports": []
     },
     "src/face/facemesh.js": {
@@ -156,7 +156,7 @@
       ]
     },
     "src/face/facepipeline.js": {
-      "bytes": 14752,
+      "bytes": 13695,
       "imports": [
         {
           "path": "src/face/box.js"
@@ -194,7 +194,7 @@
       ]
     },
     "src/gesture.js": {
-      "bytes": 3233,
+      "bytes": 3306,
       "imports": []
     },
     "src/hand/anchors.js": {
@@ -214,7 +214,7 @@
       ]
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7445,
+      "bytes": 7541,
       "imports": [
         {
           "path": "src/hand/box.js"
@@ -243,7 +243,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 13770,
+      "bytes": 13918,
       "imports": [
         {
           "path": "src/face/facemesh.js"
@@ -281,7 +281,7 @@
       ]
     },
     "src/image.js": {
-      "bytes": 4604,
+      "bytes": 4648,
       "imports": [
         {
           "path": "src/imagefx.js"
@@ -301,13 +301,13 @@
     "dist/human.esm-nobundle.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 621753
+      "bytes": 620819
     },
     "dist/human.esm-nobundle.js": {
       "imports": [],
       "inputs": {
         "src/face/blazeface.js": {
-          "bytesInOutput": 3093
+          "bytesInOutput": 3103
         },
         "src/face/keypoints.js": {
           "bytesInOutput": 1945
@@ -319,7 +319,7 @@
           "bytesInOutput": 1171
         },
         "src/face/facepipeline.js": {
-          "bytesInOutput": 5647
+          "bytesInOutput": 5432
         },
         "src/face/uvcoords.js": {
           "bytesInOutput": 16785
@@ -385,7 +385,7 @@
           "bytesInOutput": 997
         },
         "src/hand/handpipeline.js": {
-          "bytesInOutput": 2741
+          "bytesInOutput": 2781
         },
         "src/hand/anchors.js": {
           "bytesInOutput": 127000
@@ -394,13 +394,13 @@
           "bytesInOutput": 1219
         },
         "src/gesture.js": {
-          "bytesInOutput": 1574
+          "bytesInOutput": 1601
         },
         "src/imagefx.js": {
           "bytesInOutput": 11013
         },
         "src/image.js": {
-          "bytesInOutput": 2349
+          "bytesInOutput": 2343
         },
         "config.js": {
           "bytesInOutput": 1279
@@ -409,13 +409,13 @@
           "bytesInOutput": 3047
         },
         "src/human.js": {
-          "bytesInOutput": 7256
+          "bytesInOutput": 7348
         },
         "src/human.js": {
           "bytesInOutput": 0
         }
       },
-      "bytes": 216959
+      "bytes": 216907
     }
   }
 }
File diff suppressed because one or more lines are too long
@@ -288,7 +288,7 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 7058,
+      "bytes": 6937,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -296,7 +296,7 @@
       ]
     },
     "src/face/box.js": {
-      "bytes": 1924,
+      "bytes": 1929,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -327,7 +327,7 @@
       ]
     },
     "src/face/facepipeline.js": {
-      "bytes": 14752,
+      "bytes": 13695,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -371,7 +371,7 @@
       ]
     },
     "src/gesture.js": {
-      "bytes": 3233,
+      "bytes": 3306,
       "imports": []
     },
     "src/hand/anchors.js": {
@@ -398,7 +398,7 @@
       ]
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7445,
+      "bytes": 7541,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -433,7 +433,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 13770,
+      "bytes": 13918,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -474,7 +474,7 @@
       ]
     },
     "src/image.js": {
-      "bytes": 4604,
+      "bytes": 4648,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -513,7 +513,7 @@
     "dist/human.esm.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 5417738
+      "bytes": 5416804
     },
     "dist/human.esm.js": {
       "imports": [],
@@ -576,7 +576,7 @@
           "bytesInOutput": 760
         },
         "src/face/blazeface.js": {
-          "bytesInOutput": 3103
+          "bytesInOutput": 3113
         },
         "src/face/keypoints.js": {
           "bytesInOutput": 1946
@@ -588,7 +588,7 @@
           "bytesInOutput": 1190
         },
         "src/face/facepipeline.js": {
-          "bytesInOutput": 5639
+          "bytesInOutput": 5425
         },
         "src/face/uvcoords.js": {
           "bytesInOutput": 16786
@@ -654,7 +654,7 @@
           "bytesInOutput": 1005
         },
         "src/hand/handpipeline.js": {
-          "bytesInOutput": 2742
+          "bytesInOutput": 2782
         },
         "src/hand/anchors.js": {
           "bytesInOutput": 127001
@@ -663,13 +663,13 @@
           "bytesInOutput": 1197
         },
         "src/gesture.js": {
-          "bytesInOutput": 1575
+          "bytesInOutput": 1602
         },
         "src/imagefx.js": {
           "bytesInOutput": 11014
         },
         "src/image.js": {
-          "bytesInOutput": 2365
+          "bytesInOutput": 2358
         },
         "config.js": {
           "bytesInOutput": 1280
@@ -678,13 +678,13 @@
           "bytesInOutput": 3048
         },
         "src/human.js": {
-          "bytesInOutput": 7274
+          "bytesInOutput": 7366
         },
         "src/human.js": {
           "bytesInOutput": 0
         }
       },
-      "bytes": 1278633
+      "bytes": 1278581
     }
   }
 }
File diff suppressed because one or more lines are too long
@@ -288,7 +288,7 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 7058,
+      "bytes": 6937,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -296,7 +296,7 @@
       ]
     },
     "src/face/box.js": {
-      "bytes": 1924,
+      "bytes": 1929,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -327,7 +327,7 @@
       ]
     },
     "src/face/facepipeline.js": {
-      "bytes": 14752,
+      "bytes": 13695,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -371,7 +371,7 @@
       ]
     },
     "src/gesture.js": {
-      "bytes": 3233,
+      "bytes": 3306,
       "imports": []
     },
     "src/hand/anchors.js": {
@@ -398,7 +398,7 @@
       ]
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7445,
+      "bytes": 7541,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -433,7 +433,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 13770,
+      "bytes": 13918,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -474,7 +474,7 @@
       ]
     },
     "src/image.js": {
-      "bytes": 4604,
+      "bytes": 4648,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -513,7 +513,7 @@
     "dist/human.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 5417734
+      "bytes": 5416800
     },
     "dist/human.js": {
       "imports": [],
@@ -576,7 +576,7 @@
           "bytesInOutput": 760
         },
         "src/face/blazeface.js": {
-          "bytesInOutput": 3103
+          "bytesInOutput": 3113
         },
         "src/face/keypoints.js": {
           "bytesInOutput": 1946
@@ -588,7 +588,7 @@
           "bytesInOutput": 1190
         },
         "src/face/facepipeline.js": {
-          "bytesInOutput": 5639
+          "bytesInOutput": 5425
         },
         "src/face/uvcoords.js": {
           "bytesInOutput": 16786
@@ -654,7 +654,7 @@
           "bytesInOutput": 1005
         },
         "src/hand/handpipeline.js": {
-          "bytesInOutput": 2742
+          "bytesInOutput": 2782
         },
         "src/hand/anchors.js": {
           "bytesInOutput": 127001
@@ -663,13 +663,13 @@
           "bytesInOutput": 1197
         },
         "src/gesture.js": {
-          "bytesInOutput": 1575
+          "bytesInOutput": 1602
         },
         "src/imagefx.js": {
           "bytesInOutput": 11014
         },
         "src/image.js": {
-          "bytesInOutput": 2365
+          "bytesInOutput": 2358
         },
         "config.js": {
           "bytesInOutput": 1280
@@ -678,10 +678,10 @@
           "bytesInOutput": 3047
         },
         "src/human.js": {
-          "bytesInOutput": 7312
+          "bytesInOutput": 7404
         }
       },
-      "bytes": 1278678
+      "bytes": 1278626
     }
   }
 }
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -128,11 +128,11 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 7058,
+      "bytes": 6937,
       "imports": []
     },
     "src/face/box.js": {
-      "bytes": 1924,
+      "bytes": 1929,
       "imports": []
     },
     "src/face/facemesh.js": {
@@ -156,7 +156,7 @@
       ]
     },
     "src/face/facepipeline.js": {
-      "bytes": 14752,
+      "bytes": 13695,
       "imports": [
         {
           "path": "src/face/box.js"
@@ -194,7 +194,7 @@
       ]
     },
     "src/gesture.js": {
-      "bytes": 3233,
+      "bytes": 3306,
       "imports": []
     },
     "src/hand/anchors.js": {
@@ -214,7 +214,7 @@
       ]
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7445,
+      "bytes": 7541,
       "imports": [
         {
           "path": "src/hand/box.js"
@@ -243,7 +243,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 13770,
+      "bytes": 13918,
       "imports": [
         {
           "path": "src/face/facemesh.js"
@@ -281,7 +281,7 @@
       ]
     },
     "src/image.js": {
-      "bytes": 4604,
+      "bytes": 4648,
       "imports": [
         {
           "path": "src/imagefx.js"
@@ -301,13 +301,13 @@
     "dist/human.node-nobundle.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 635930
+      "bytes": 635150
     },
     "dist/human.node-nobundle.js": {
       "imports": [],
       "inputs": {
         "src/face/blazeface.js": {
-          "bytesInOutput": 3093
+          "bytesInOutput": 3103
         },
         "src/face/keypoints.js": {
           "bytesInOutput": 1945
@@ -319,7 +319,7 @@
           "bytesInOutput": 1171
         },
         "src/face/facepipeline.js": {
-          "bytesInOutput": 5647
+          "bytesInOutput": 5432
         },
         "src/face/uvcoords.js": {
           "bytesInOutput": 16785
@@ -385,7 +385,7 @@
           "bytesInOutput": 996
         },
         "src/hand/handpipeline.js": {
-          "bytesInOutput": 2741
+          "bytesInOutput": 2781
         },
         "src/hand/anchors.js": {
           "bytesInOutput": 127000
@@ -394,13 +394,13 @@
           "bytesInOutput": 1219
         },
         "src/gesture.js": {
-          "bytesInOutput": 1574
+          "bytesInOutput": 1601
         },
         "src/imagefx.js": {
           "bytesInOutput": 11013
         },
         "src/image.js": {
-          "bytesInOutput": 2349
+          "bytesInOutput": 2343
         },
         "config.js": {
           "bytesInOutput": 1278
@@ -412,10 +412,10 @@
           "bytesInOutput": 28
         },
         "src/human.js": {
-          "bytesInOutput": 7256
+          "bytesInOutput": 7348
         }
       },
-      "bytes": 216966
+      "bytes": 216914
     }
   }
 }
@@ -110,21 +110,17 @@ class BlazeFaceModel {
       return vals;
     });

+    const scoresVal = scores.dataSync();
     const annotatedBoxes = [];
-    for (let i = 0; i < boundingBoxes.length; i++) {
-      const boundingBox = boundingBoxes[i];
-      const box = createBox(boundingBox);
+    for (const i in boundingBoxes) {
       const boxIndex = boxIndices[i];
-      const anchor = this.anchorsData[boxIndex];
-      const sliced = tf.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]);
-      const squeezed = sliced.squeeze();
-      const landmarks = squeezed.reshape([NUM_LANDMARKS, -1]);
-      const probability = tf.slice(scores, [boxIndex], [1]);
-      const annotatedBox = { box, landmarks, probability, anchor };
-      annotatedBoxes.push(annotatedBox);
-      sliced.dispose();
-      squeezed.dispose();
-      // landmarks.dispose();
+      const confidence = scoresVal[boxIndex];
+      if (confidence > this.config.detector.minConfidence) {
+        const box = createBox(boundingBoxes[i]);
+        const anchor = this.anchorsData[boxIndex];
+        const landmarks = tf.tidy(() => tf.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]).squeeze().reshape([NUM_LANDMARKS, -1]));
+        annotatedBoxes.push({ box, landmarks, anchor, confidence });
+      }
     }
     detectedOutputs.dispose();
     boxes.dispose();
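Note: the rewritten loop reads all scores once via dataSync() and gates every per-box tensor allocation behind minConfidence, so rejected candidates cost nothing; tf.tidy() then reclaims the intermediate slice/squeeze tensors that the old code had to dispose by hand. A condensed sketch of the pattern (names taken from the hunk above; the early-continue form is an equivalent restatement, not the verbatim code):

    const scoresVal = scores.dataSync(); // one synchronous read instead of a tf.slice per box
    const annotatedBoxes = [];
    for (const i in boundingBoxes) {
      const boxIndex = boxIndices[i];
      const confidence = scoresVal[boxIndex];
      if (confidence <= this.config.detector.minConfidence) continue; // cheap rejection: no tensors created
      // tf.tidy keeps only the returned tensor and disposes the slice/squeeze intermediates
      const landmarks = tf.tidy(() => tf.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]).squeeze().reshape([NUM_LANDMARKS, -1]));
      annotatedBoxes.push({ box: createBox(boundingBoxes[i]), landmarks, anchor: this.anchorsData[boxIndex], confidence });
    }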
@@ -6,6 +6,7 @@ function scaleBoxCoordinates(box, factor) {
   return { startPoint, endPoint };
 }
 exports.scaleBoxCoordinates = scaleBoxCoordinates;
+
 function getBoxSize(box) {
   return [
     Math.abs(box.endPoint[0] - box.startPoint[0]),
@@ -13,6 +14,7 @@ function getBoxSize(box) {
   ];
 }
 exports.getBoxSize = getBoxSize;
+
 function getBoxCenter(box) {
   return [
     box.startPoint[0] + (box.endPoint[0] - box.startPoint[0]) / 2,
@@ -20,6 +22,7 @@ function getBoxCenter(box) {
   ];
 }
 exports.getBoxCenter = getBoxCenter;
+
 function cutBoxFromImageAndResize(box, image, cropSize) {
   const h = image.shape[1];
   const w = image.shape[2];
@@ -30,6 +33,7 @@ function cutBoxFromImageAndResize(box, image, cropSize) {
   return tf.image.cropAndResize(image, boxes, [0], cropSize);
 }
 exports.cutBoxFromImageAndResize = cutBoxFromImageAndResize;
+
 function enlargeBox(box, factor = 1.5) {
   const center = getBoxCenter(box);
   const size = getBoxSize(box);
@@ -39,6 +43,7 @@ function enlargeBox(box, factor = 1.5) {
   return { startPoint, endPoint, landmarks: box.landmarks };
 }
 exports.enlargeBox = enlargeBox;
+
 function squarifyBox(box) {
   const centers = getBoxCenter(box);
   const size = getBoxSize(box);
@@ -5,7 +5,6 @@ const keypoints = require('./keypoints');
 const util = require('./util');

 const LANDMARKS_COUNT = 468;
-const UPDATE_REGION_OF_INTEREST_IOU_THRESHOLD = 0.25;
 const MESH_MOUTH_INDEX = 13;
 const MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES = [MESH_MOUTH_INDEX, keypoints.MESH_ANNOTATIONS['midwayBetweenEyes'][0]];
 const BLAZEFACE_MOUTH_INDEX = 3;
@@ -41,7 +40,7 @@ function replaceRawCoordinates(rawCoords, newCoords, prefix, keys) {
 class Pipeline {
   constructor(boundingBoxDetector, meshDetector, irisModel, config) {
     // An array of facial bounding boxes.
-    this.regionsOfInterest = [];
+    this.storedBoxes = [];
     this.runsWithoutFaceDetector = 0;
     this.boundingBoxDetector = boundingBoxDetector;
     this.meshDetector = meshDetector;
@@ -50,6 +49,8 @@ class Pipeline {
     this.meshHeight = config.mesh.inputSize;
     this.irisSize = config.iris.inputSize;
     this.irisEnlarge = 2.3;
+    this.skipped = 1000;
+    this.detectedFaces = 0;
   }

   transformRawCoords(rawCoords, box, angle, rotationMatrix) {
@@ -129,35 +130,39 @@ class Pipeline {
   }

   async predict(input, config) {
-    this.runsWithoutFaceDetector += 1;
-    let useFreshBox = (this.detectedFaces === 0) || (this.detectedFaces !== this.regionsOfInterest.length);
+    this.skipped++;
+    let useFreshBox = false;
+    // run new detector every skipFrames unless we only want box to start with
     let detector;
-    // but every skipFrames check if detect boxes number changed
-    if (useFreshBox || (this.runsWithoutFaceDetector > config.detector.skipFrames)) detector = await this.boundingBoxDetector.getBoundingBoxes(input);
-    // if there are new boxes and number of boxes doesn't match use new boxes, but not if maxhands is fixed to 1
-    if (config.detector.maxFaces > 1 && detector && detector.boxes && detector.boxes.length > 0 && detector.boxes.length !== this.detectedFaces) useFreshBox = true;
+    if ((this.skipped > config.detector.skipFrames) || !config.mesh.enabled) {
+      detector = await this.boundingBoxDetector.getBoundingBoxes(input);
+      // don't reset on test image
+      if ((input.shape[1] !== 255) && (input.shape[2] !== 255)) this.skipped = 0;
+    }

+    // if detector result count doesn't match current working set, use it to reset current working set
+    if (detector && detector.boxes && (detector.boxes.length > 0) && (!config.mesh.enabled || (detector.boxes.length !== this.detectedFaces) && (this.detectedFaces !== config.detector.maxFaces))) {
+      this.storedBoxes = [];
+      this.detectedFaces = 0;
+      for (const possible of detector.boxes) {
+        this.storedBoxes.push({ startPoint: possible.box.startPoint.dataSync(), endPoint: possible.box.endPoint.dataSync(), landmarks: possible.landmarks, confidence: possible.confidence });
+      }
+      if (this.storedBoxes.length > 0) useFreshBox = true;
+    }

     if (useFreshBox) {
-      // const detector = await this.boundingBoxDetector.getBoundingBoxes(input);
       if (!detector || !detector.boxes || (detector.boxes.length === 0)) {
-        this.regionsOfInterest = [];
+        this.storedBoxes = [];
         this.detectedFaces = 0;
         return null;
       }
-      const scaledBoxes = detector.boxes.map((prediction) => {
-        const startPoint = prediction.box.startPoint.squeeze();
-        const endPoint = prediction.box.endPoint.squeeze();
-        const predictionBox = {
-          startPoint: startPoint.arraySync(),
-          endPoint: endPoint.arraySync(),
-        };
-        startPoint.dispose();
-        endPoint.dispose();
-        const scaledBox = bounding.scaleBoxCoordinates(predictionBox, detector.scaleFactor);
+      for (const i in this.storedBoxes) {
+        const scaledBox = bounding.scaleBoxCoordinates({ startPoint: this.storedBoxes[i].startPoint, endPoint: this.storedBoxes[i].endPoint }, detector.scaleFactor);
         const enlargedBox = bounding.enlargeBox(scaledBox);
-        const landmarks = prediction.landmarks.arraySync();
-        return { ...enlargedBox, landmarks };
-      });
-      this.updateRegionsOfInterest(scaledBoxes);
+        const landmarks = this.storedBoxes[i].landmarks.arraySync();
+        const confidence = this.storedBoxes[i].confidence;
+        this.storedBoxes[i] = { ...enlargedBox, confidence, landmarks };
+      }
       this.runsWithoutFaceDetector = 0;
     }
     if (detector && detector.boxes) {
@@ -165,10 +170,12 @@ class Pipeline {
         prediction.box.startPoint.dispose();
         prediction.box.endPoint.dispose();
         prediction.landmarks.dispose();
-        prediction.probability.dispose();
       });
     }
-    let results = tf.tidy(() => this.regionsOfInterest.map((box, i) => {
+
+    // console.log(this.skipped, config.detector.skipFrames, this.detectedFaces, config.detector.maxFaces, detector?.boxes.length, this.storedBoxes.length);
+
+    let results = tf.tidy(() => this.storedBoxes.map((box, i) => {
       let angle = 0;
       // The facial bounding box landmarks could come either from blazeface (if we are using a fresh box), or from the mesh model (if we are reusing an old box).
       const boxLandmarksFromMeshModel = box.landmarks.length >= LANDMARKS_COUNT;
@@ -187,6 +194,19 @@ class Pipeline {
       }
       const boxCPU = { startPoint: box.startPoint, endPoint: box.endPoint };
       const face = bounding.cutBoxFromImageAndResize(boxCPU, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);

+      // if we're not going to produce mesh, don't spend time with further processing
+      if (!config.mesh.enabled) {
+        const prediction = {
+          coords: null,
+          box,
+          faceConfidence: null,
+          confidence: box.confidence,
+          image: face,
+        };
+        return prediction;
+      }
+
       // The first returned tensor represents facial contours, which are included in the coordinates.
       const [, confidence, coords] = this.meshDetector.predict(face);
       const confidenceVal = confidence.dataSync()[0];
@@ -224,17 +244,15 @@ class Pipeline {
       const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
       tf.dispose(rawCoords);
       const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));
+      const transformedCoords = tf.tensor2d(transformedCoordsData);
       const prediction = {
-        coords: null,
+        coords: transformedCoords,
         box: landmarksBox,
-        confidence: confidenceVal,
+        faceConfidence: confidenceVal,
+        confidence: box.confidence,
         image: face,
       };
-      if (config.mesh.enabled) {
-        const transformedCoords = tf.tensor2d(transformedCoordsData);
-        this.regionsOfInterest[i] = { ...landmarksBox, landmarks: transformedCoords.arraySync() };
-        prediction.coords = transformedCoords;
-      }
+      this.storedBoxes[i] = { ...landmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal };
       return prediction;
     }));
     results = results.filter((a) => a !== null);
@@ -242,42 +260,6 @@ class Pipeline {
     return results;
   }

-  // Updates regions of interest if the intersection over union between the incoming and previous regions falls below a threshold.
-  updateRegionsOfInterest(boxes) {
-    for (let i = 0; i < boxes.length; i++) {
-      const box = boxes[i];
-      const previousBox = this.regionsOfInterest[i];
-      let iou = 0;
-      if (previousBox && previousBox.startPoint) {
-        const [boxStartX, boxStartY] = box.startPoint;
-        const [boxEndX, boxEndY] = box.endPoint;
-        const [previousBoxStartX, previousBoxStartY] = previousBox.startPoint;
-        const [previousBoxEndX, previousBoxEndY] = previousBox.endPoint;
-        const xStartMax = Math.max(boxStartX, previousBoxStartX);
-        const yStartMax = Math.max(boxStartY, previousBoxStartY);
-        const xEndMin = Math.min(boxEndX, previousBoxEndX);
-        const yEndMin = Math.min(boxEndY, previousBoxEndY);
-        const intersection = (xEndMin - xStartMax) * (yEndMin - yStartMax);
-        const boxArea = (boxEndX - boxStartX) * (boxEndY - boxStartY);
-        const previousBoxArea = (previousBoxEndX - previousBoxStartX) * (previousBoxEndY - boxStartY);
-        iou = intersection / (boxArea + previousBoxArea - intersection);
-      }
-      if (iou < UPDATE_REGION_OF_INTEREST_IOU_THRESHOLD) {
-        this.regionsOfInterest[i] = box;
-      }
-    }
-    this.regionsOfInterest = this.regionsOfInterest.slice(0, boxes.length);
-  }
-
-  clearRegionOfInterest(index) {
-    if (this.regionsOfInterest[index] != null) {
-      this.regionsOfInterest = [
-        ...this.regionsOfInterest.slice(0, index),
-        ...this.regionsOfInterest.slice(index + 1),
-      ];
-    }
-  }
-
   calculateLandmarksBoundingBox(landmarks) {
     const xs = landmarks.map((d) => d[0]);
     const ys = landmarks.map((d) => d[1]);
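Note: the IoU-based updateRegionsOfInterest / clearRegionOfInterest tracking is gone; the pipeline now runs blazeface only when the skipped counter exceeds config.detector.skipFrames (or mesh is disabled) and otherwise reuses storedBoxes, which the mesh step itself refreshes every frame. A condensed sketch of the caching decision (illustrative simplification, not the verbatim method):

    this.skipped++;
    let detector;
    if ((this.skipped > config.detector.skipFrames) || !config.mesh.enabled) {
      detector = await this.boundingBoxDetector.getBoundingBoxes(input);
      // a 255x255 input is the warmup/test frame; don't let it reset the counter
      if ((input.shape[1] !== 255) && (input.shape[2] !== 255)) this.skipped = 0;
    }
    // rebuild the cached working set only when the detector disagrees with it
    if (detector && detector.boxes.length > 0 && detector.boxes.length !== this.detectedFaces) {
      this.storedBoxes = detector.boxes.map((b) => ({
        startPoint: b.box.startPoint.dataSync(),
        endPoint: b.box.endPoint.dataSync(),
        landmarks: b.landmarks,
        confidence: b.confidence,
      }));
    }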
@@ -25,17 +25,19 @@ exports.face = (res) => {
     // if (face.annotations['rightCheek'] && face.annotations['leftCheek'] && (face.annotations['rightCheek'].length > 0) && (face.annotations['leftCheek'].length > 0)) {
     //   gestures.push(`facing ${((face.annotations['rightCheek'][0][2] > 0) || (face.annotations['leftCheek'][0][2] < 0)) ? 'right' : 'left'}`);
     // }
-    const eyeFacing = face.mesh[35][2] - face.mesh[263][2];
-    if (Math.abs(eyeFacing) < 10) gestures.push('facing camera');
-    else gestures.push(`facing ${eyeFacing < 0 ? 'right' : 'left'}`);
-    const openLeft = Math.abs(face.mesh[374][1] - face.mesh[386][1]) / Math.abs(face.mesh[443][1] - face.mesh[450][1]); // center of eye inner lid y coord div center of wider eye border y coord
-    if (openLeft < 0.2) gestures.push('blink left eye');
-    const openRight = Math.abs(face.mesh[145][1] - face.mesh[159][1]) / Math.abs(face.mesh[223][1] - face.mesh[230][1]); // center of eye inner lid y coord div center of wider eye border y coord
-    if (openRight < 0.2) gestures.push('blink right eye');
-    const mouthOpen = Math.min(100, 500 * Math.abs(face.mesh[13][1] - face.mesh[14][1]) / Math.abs(face.mesh[10][1] - face.mesh[152][1]));
-    if (mouthOpen > 10) gestures.push(`mouth ${Math.trunc(mouthOpen)}% open`);
-    const chinDepth = face.mesh[152][2];
-    if (Math.abs(chinDepth) > 10) gestures.push(`head ${chinDepth < 0 ? 'up' : 'down'}`);
+    if (face.mesh && face.mesh.length > 0) {
+      const eyeFacing = face.mesh[35][2] - face.mesh[263][2];
+      if (Math.abs(eyeFacing) < 10) gestures.push('facing camera');
+      else gestures.push(`facing ${eyeFacing < 0 ? 'right' : 'left'}`);
+      const openLeft = Math.abs(face.mesh[374][1] - face.mesh[386][1]) / Math.abs(face.mesh[443][1] - face.mesh[450][1]); // center of eye inner lid y coord div center of wider eye border y coord
+      if (openLeft < 0.2) gestures.push('blink left eye');
+      const openRight = Math.abs(face.mesh[145][1] - face.mesh[159][1]) / Math.abs(face.mesh[223][1] - face.mesh[230][1]); // center of eye inner lid y coord div center of wider eye border y coord
+      if (openRight < 0.2) gestures.push('blink right eye');
+      const mouthOpen = Math.min(100, 500 * Math.abs(face.mesh[13][1] - face.mesh[14][1]) / Math.abs(face.mesh[10][1] - face.mesh[152][1]));
+      if (mouthOpen > 10) gestures.push(`mouth ${Math.trunc(mouthOpen)}% open`);
+      const chinDepth = face.mesh[152][2];
+      if (Math.abs(chinDepth) > 10) gestures.push(`head ${chinDepth < 0 ? 'up' : 'down'}`);
+    }
   }
   return gestures;
 };
@@ -33,7 +33,7 @@ class HandPipeline {
     this.meshDetector = meshDetector;
     this.inputSize = inputSize;
     this.storedBoxes = [];
-    this.skipped = 0;
+    this.skipped = 1000;
     this.detectedHands = 0;
   }

@@ -91,7 +91,8 @@ class HandPipeline {
     let boxes;
     if ((this.skipped > config.skipFrames) || !config.landmarks) {
       boxes = await this.boxDetector.estimateHandBounds(image, config);
-      this.skipped = 0;
+      // don't reset on test image
+      if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;
     }

     // if detector result count doesn't match current working set, use it to reset current working set
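Note: seeding skipped with 1000 instead of 0 forces the hand box detector to run on the very first frame, since the counter already exceeds any reasonable skipFrames value; the 255x255 shape check then keeps the library's warmup frame from resetting that counter afterwards.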
@@ -373,6 +373,12 @@ class Human {
       resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });
     });
   }

+  async warmup(userConfig) {
+    const warmup = new ImageData(255, 255);
+    await this.detect(warmup, userConfig);
+    this.log('warmed up');
+  }
 }

 export { Human as default };
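Note: the warmup image is deliberately 255x255 — the face and hand pipelines above special-case exactly that shape so the warmup frame does not reset their skip counters. A usage sketch (browser context, where ImageData exists; `video` is a hypothetical input element):

    const human = new Human();
    await human.warmup(userConfig); // first detect() pass compiles kernels and uploads weights on a blank frame
    const result = await human.detect(video); // subsequent calls start at full speed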
@@ -52,8 +52,10 @@ function process(input, config) {
       if (config.filter.polaroid) this.fx.addFilter('polaroid');
       if (config.filter.pixelate !== 0) this.fx.addFilter('pixelate', config.filter.pixelate);
       this.fx.apply(inCanvas);
+    } else {
+      outCanvas = inCanvas;
     }
-    if (!outCanvas) outCanvas = inCanvas;
+    // if (!outCanvas) outCanvas = inCanvas;
     let pixels;
     if ((config.backend === 'webgl') || (outCanvas instanceof ImageData)) {
       // tf kernel-optimized method to get imagedata, also if input is imagedata, just use it