fix serious performance bug around skipframes

pull/50/head
Vladimir Mandic 2020-11-17 17:42:44 -05:00
parent 3c0dde0d6b
commit 0b7bc69421
30 changed files with 3436 additions and 3240 deletions
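
Reading the hunks below, the root cause appears to be this: Human's loaders and pipelines used to receive only their own slice of the configuration (facemesh.load(this.config.face), handpose.estimateHands(process.tensor, this.config.hand)), while the skip-frame guards inside facepipeline and handpipeline also read the top-level config.videoOptimized flag. On a sliced config that property does not exist, so !config.videoOptimized was always true and the expensive bounding-box detectors ran on every single frame, defeating skipFrames entirely. The fix passes the full config object through every call and qualifies each lookup (config.face.detector.skipFrames, config.hand.skipFrames, and so on). A minimal restatement of the failure mode, using a hypothetical subConfig variable:

// before: the pipeline saw only the sub-config, which has no videoOptimized key
const subConfig = config.hand;
const runDetector = (skipped > subConfig.skipFrames) || !subConfig.videoOptimized;
// subConfig.videoOptimized === undefined and !undefined === true,
// so the detector ran on every frame and skipFrames never took effect

// after: the pipeline receives the full config and uses qualified paths
const runDetector = (skipped > config.hand.skipFrames) || !config.videoOptimized;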

demo/browser.js

@@ -331,7 +331,7 @@ async function processImage(input) {
 // just initialize everything and call main function
 async function detectVideo() {
-  human.config.videoOptimized = true;
+  userConfig.videoOptimized = true;
   document.getElementById('samples-container').style.display = 'none';
   document.getElementById('canvas').style.display = 'block';
   const video = document.getElementById('video');
@@ -353,7 +353,7 @@ async function detectVideo() {
 // just initialize everything and call main function
 async function detectSampleImages() {
   document.getElementById('play').style.display = 'none';
-  human.config.videoOptimized = false;
+  userConfig.videoOptimized = false;
   const size = 12 + Math.trunc(12 * ui.columns * window.innerWidth / document.body.clientWidth);
   ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`);
   ui.baseLineHeight = ui.baseLineHeightProto * ui.columns;

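The demo change above follows the same theme: rather than mutating human.config in place, the demo sets the flag on its own userConfig object and passes that to each human.detect(input, userConfig) call, where it is merged over the defaults. A sketch of the implied calling convention, not a quote from the demo:

userConfig.videoOptimized = true;                     // demo-owned config object
const result = await human.detect(video, userConfig); // merged with defaults per call
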
dist/demo-browser-index.js: file diff suppressed because one or more lines are too long

dist/demo-browser-index.js.map: file diff suppressed because one or more lines are too long

dist/demo-browser-index.json

@@ -5,7 +5,7 @@
"imports": []
},
"demo/browser.js": {
"bytes": 22885,
"bytes": 22881,
"imports": [
{
"path": "dist/human.esm.js"
@@ -30,7 +30,7 @@
"imports": []
},
"dist/human.esm.js": {
"bytes": 1838222,
"bytes": 1785092,
"imports": []
}
},
@@ -38,29 +38,29 @@
"dist/demo-browser-index.js.map": {
"imports": [],
"inputs": {},
"bytes": 2997188
"bytes": 2672929
},
"dist/demo-browser-index.js": {
"imports": [],
"exports": [],
"inputs": {
"dist/human.esm.js": {
"bytesInOutput": 1831121
"bytesInOutput": 1777972
},
"demo/draw.js": {
"bytesInOutput": 7341
"bytesInOutput": 7284
},
"demo/menu.js": {
"bytesInOutput": 12028
"bytesInOutput": 11952
},
"assets/gl-bench.js": {
"bytesInOutput": 7809
"bytesInOutput": 7731
},
"demo/browser.js": {
"bytesInOutput": 17360
"bytesInOutput": 17051
}
},
"bytes": 1882924
"bytes": 1829255
}
}
}

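The dist/*.json files in this commit appear to be esbuild metafiles: for each output bundle they record which inputs were bundled and how many bytes each contributed (bytesInOutput), which is why they churn on every build. A sketch of how such a manifest is produced, assuming the modern esbuild API (metafile: true, esbuild >= 0.9; the repo may invoke esbuild differently):

const esbuild = require('esbuild');
const fs = require('fs');

async function buildWithManifest() {
  const result = await esbuild.build({
    entryPoints: ['demo/browser.js'],
    outfile: 'dist/demo-browser-index.js',
    bundle: true,
    metafile: true, // ask esbuild for per-input byte accounting
  });
  // result.metafile has the same shape as the JSON above: inputs, outputs, bytes, bytesInOutput
  fs.writeFileSync('dist/demo-browser-index.json', JSON.stringify(result.metafile, null, 2));
}

buildWithManifest();
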
dist/human.esm-nobundle.js: file diff suppressed because one or more lines are too long

dist/human.esm-nobundle.js.map: file diff suppressed because one or more lines are too long

dist/human.esm-nobundle.json

@@ -5,7 +5,7 @@
"imports": []
},
"dist/tfjs.esm.js": {
"bytes": 1586281,
"bytes": 1537126,
"imports": []
},
"package.json": {
@@ -159,7 +159,7 @@
]
},
"src/face/blazeface.js": {
"bytes": 6942,
"bytes": 6992,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -179,7 +179,7 @@
"imports": []
},
"src/face/facemesh.js": {
"bytes": 2475,
"bytes": 2455,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -196,7 +196,7 @@
]
},
"src/face/facepipeline.js": {
"bytes": 13802,
"bytes": 13867,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -244,7 +244,7 @@
]
},
"src/hand/handdetector.js": {
"bytes": 4235,
"bytes": 4275,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -255,7 +255,7 @@
]
},
"src/hand/handpipeline.js": {
"bytes": 7572,
"bytes": 7607,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -269,7 +269,7 @@
]
},
"src/hand/handpose.js": {
"bytes": 3034,
"bytes": 3074,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -290,7 +290,7 @@
"imports": []
},
"src/human.js": {
"bytes": 15702,
"bytes": 15667,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -357,7 +357,7 @@
"dist/human.esm-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 2912224
"bytes": 2588850
},
"dist/human.esm-nobundle.js": {
"imports": [],
@@ -366,112 +366,112 @@
],
"inputs": {
"src/face/blazeface.js": {
"bytesInOutput": 5797
"bytesInOutput": 5584
},
"src/face/box.js": {
"bytesInOutput": 1677
"bytesInOutput": 1600
},
"src/face/util.js": {
"bytesInOutput": 2480
"bytesInOutput": 2423
},
"src/face/coords.js": {
"bytesInOutput": 30878
"bytesInOutput": 30819
},
"src/face/facepipeline.js": {
"bytesInOutput": 9895
"bytesInOutput": 9579
},
"src/face/facemesh.js": {
"bytesInOutput": 1983
"bytesInOutput": 1927
},
"src/profile.js": {
"bytesInOutput": 897
"bytesInOutput": 860
},
"src/age/age.js": {
"bytesInOutput": 1271
"bytesInOutput": 1226
},
"src/gender/gender.js": {
"bytesInOutput": 2157
"bytesInOutput": 2074
},
"src/emotion/emotion.js": {
"bytesInOutput": 2001
"bytesInOutput": 1930
},
"src/embedding/embedding.js": {
"bytesInOutput": 1452
"bytesInOutput": 1416
},
"src/body/modelBase.js": {
"bytesInOutput": 655
"bytesInOutput": 626
},
"src/body/modelMobileNet.js": {
"bytesInOutput": 411
"bytesInOutput": 406
},
"src/body/heapSort.js": {
"bytesInOutput": 1147
"bytesInOutput": 1138
},
"src/body/buildParts.js": {
"bytesInOutput": 1353
"bytesInOutput": 1313
},
"src/body/keypoints.js": {
"bytesInOutput": 1822
"bytesInOutput": 1810
},
"src/body/vectors.js": {
"bytesInOutput": 1095
"bytesInOutput": 1058
},
"src/body/decodePose.js": {
"bytesInOutput": 3229
"bytesInOutput": 3131
},
"src/body/decodeMultiple.js": {
"bytesInOutput": 1732
"bytesInOutput": 1682
},
"src/body/util.js": {
"bytesInOutput": 1958
"bytesInOutput": 1923
},
"src/body/modelPoseNet.js": {
"bytesInOutput": 1686
"bytesInOutput": 1603
},
"src/body/posenet.js": {
"bytesInOutput": 863
"bytesInOutput": 834
},
"src/hand/handdetector.js": {
"bytesInOutput": 3234
"bytesInOutput": 3159
},
"src/hand/handpipeline.js": {
"bytesInOutput": 4912
"bytesInOutput": 4754
},
"src/hand/anchors.js": {
"bytesInOutput": 127037
"bytesInOutput": 127032
},
"src/hand/handpose.js": {
"bytesInOutput": 1855
"bytesInOutput": 1857
},
"src/gesture.js": {
"bytesInOutput": 2328
"bytesInOutput": 2255
},
"src/imagefx.js": {
"bytesInOutput": 13892
"bytesInOutput": 13638
},
"src/image.js": {
"bytesInOutput": 4143
"bytesInOutput": 4060
},
"dist/tfjs.esm.js": {
"bytesInOutput": 1579998
"bytesInOutput": 1529542
},
"src/human.js": {
"bytesInOutput": 10836
"bytesInOutput": 10540
},
"src/hand/box.js": {
"bytesInOutput": 1990
"bytesInOutput": 1894
},
"src/hand/util.js": {
"bytesInOutput": 1860
"bytesInOutput": 1808
},
"config.js": {
"bytesInOutput": 1431
"bytesInOutput": 1368
},
"package.json": {
"bytesInOutput": 21
}
},
"bytes": 1838231
"bytes": 1785101
}
}
}

591 dist/human.esm.js vendored: file diff suppressed because one or more lines are too long

dist/human.esm.js.map: file diff suppressed because one or more lines are too long

88 dist/human.esm.json vendored

@@ -5,7 +5,7 @@
"imports": []
},
"dist/tfjs.esm.js": {
"bytes": 1586281,
"bytes": 1537126,
"imports": []
},
"package.json": {
@@ -159,7 +159,7 @@
]
},
"src/face/blazeface.js": {
"bytes": 6942,
"bytes": 6992,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -179,7 +179,7 @@
"imports": []
},
"src/face/facemesh.js": {
"bytes": 2475,
"bytes": 2455,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -196,7 +196,7 @@
]
},
"src/face/facepipeline.js": {
"bytes": 13802,
"bytes": 13867,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -244,7 +244,7 @@
]
},
"src/hand/handdetector.js": {
"bytes": 4235,
"bytes": 4275,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -255,7 +255,7 @@
]
},
"src/hand/handpipeline.js": {
"bytes": 7572,
"bytes": 7607,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -269,7 +269,7 @@
]
},
"src/hand/handpose.js": {
"bytes": 3034,
"bytes": 3074,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -290,7 +290,7 @@
"imports": []
},
"src/human.js": {
"bytes": 15702,
"bytes": 15667,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -357,7 +357,7 @@
"dist/human.esm.js.map": {
"imports": [],
"inputs": {},
"bytes": 2912224
"bytes": 2588850
},
"dist/human.esm.js": {
"imports": [],
@@ -366,112 +366,112 @@
],
"inputs": {
"src/face/blazeface.js": {
"bytesInOutput": 5797
"bytesInOutput": 5584
},
"src/face/box.js": {
"bytesInOutput": 1677
"bytesInOutput": 1600
},
"src/face/util.js": {
"bytesInOutput": 2480
"bytesInOutput": 2423
},
"src/face/coords.js": {
"bytesInOutput": 30878
"bytesInOutput": 30819
},
"src/face/facepipeline.js": {
"bytesInOutput": 9895
"bytesInOutput": 9579
},
"src/face/facemesh.js": {
"bytesInOutput": 1983
"bytesInOutput": 1927
},
"src/profile.js": {
"bytesInOutput": 897
"bytesInOutput": 860
},
"src/age/age.js": {
"bytesInOutput": 1271
"bytesInOutput": 1226
},
"src/gender/gender.js": {
"bytesInOutput": 2157
"bytesInOutput": 2074
},
"src/emotion/emotion.js": {
"bytesInOutput": 2001
"bytesInOutput": 1930
},
"src/embedding/embedding.js": {
"bytesInOutput": 1452
"bytesInOutput": 1416
},
"src/body/modelBase.js": {
"bytesInOutput": 655
"bytesInOutput": 626
},
"src/body/modelMobileNet.js": {
"bytesInOutput": 411
"bytesInOutput": 406
},
"src/body/heapSort.js": {
"bytesInOutput": 1147
"bytesInOutput": 1138
},
"src/body/buildParts.js": {
"bytesInOutput": 1353
"bytesInOutput": 1313
},
"src/body/keypoints.js": {
"bytesInOutput": 1822
"bytesInOutput": 1810
},
"src/body/vectors.js": {
"bytesInOutput": 1095
"bytesInOutput": 1058
},
"src/body/decodePose.js": {
"bytesInOutput": 3229
"bytesInOutput": 3131
},
"src/body/decodeMultiple.js": {
"bytesInOutput": 1732
"bytesInOutput": 1682
},
"src/body/util.js": {
"bytesInOutput": 1958
"bytesInOutput": 1923
},
"src/body/modelPoseNet.js": {
"bytesInOutput": 1686
"bytesInOutput": 1603
},
"src/body/posenet.js": {
"bytesInOutput": 863
"bytesInOutput": 834
},
"src/hand/handdetector.js": {
"bytesInOutput": 3234
"bytesInOutput": 3159
},
"src/hand/handpipeline.js": {
"bytesInOutput": 4912
"bytesInOutput": 4754
},
"src/hand/anchors.js": {
"bytesInOutput": 127037
"bytesInOutput": 127032
},
"src/hand/handpose.js": {
"bytesInOutput": 1855
"bytesInOutput": 1857
},
"src/gesture.js": {
"bytesInOutput": 2328
"bytesInOutput": 2255
},
"src/imagefx.js": {
"bytesInOutput": 13892
"bytesInOutput": 13638
},
"src/image.js": {
"bytesInOutput": 4143
"bytesInOutput": 4060
},
"dist/tfjs.esm.js": {
"bytesInOutput": 1579998
"bytesInOutput": 1529542
},
"src/human.js": {
"bytesInOutput": 10836
"bytesInOutput": 10540
},
"src/hand/box.js": {
"bytesInOutput": 1990
"bytesInOutput": 1894
},
"src/hand/util.js": {
"bytesInOutput": 1860
"bytesInOutput": 1808
},
"config.js": {
"bytesInOutput": 1431
"bytesInOutput": 1368
},
"package.json": {
"bytesInOutput": 21
}
},
"bytes": 1838222
"bytes": 1785092
}
}
}

591 dist/human.js vendored: file diff suppressed because one or more lines are too long

6 dist/human.js.map vendored: file diff suppressed because one or more lines are too long

88 dist/human.json vendored

@@ -5,7 +5,7 @@
"imports": []
},
"dist/tfjs.esm.js": {
"bytes": 1586281,
"bytes": 1537126,
"imports": []
},
"package.json": {
@@ -159,7 +159,7 @@
]
},
"src/face/blazeface.js": {
"bytes": 6942,
"bytes": 6992,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -179,7 +179,7 @@
"imports": []
},
"src/face/facemesh.js": {
"bytes": 2475,
"bytes": 2455,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -196,7 +196,7 @@
]
},
"src/face/facepipeline.js": {
"bytes": 13802,
"bytes": 13867,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -244,7 +244,7 @@
]
},
"src/hand/handdetector.js": {
"bytes": 4235,
"bytes": 4275,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -255,7 +255,7 @@
]
},
"src/hand/handpipeline.js": {
"bytes": 7572,
"bytes": 7607,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -269,7 +269,7 @@
]
},
"src/hand/handpose.js": {
"bytes": 3034,
"bytes": 3074,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -290,7 +290,7 @@
"imports": []
},
"src/human.js": {
"bytes": 15702,
"bytes": 15667,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -357,119 +357,119 @@
"dist/human.js.map": {
"imports": [],
"inputs": {},
"bytes": 2879915
"bytes": 2556594
},
"dist/human.js": {
"imports": [],
"exports": [],
"inputs": {
"src/face/blazeface.js": {
"bytesInOutput": 5773
"bytesInOutput": 5560
},
"src/face/box.js": {
"bytesInOutput": 1677
"bytesInOutput": 1600
},
"src/face/util.js": {
"bytesInOutput": 2480
"bytesInOutput": 2423
},
"src/face/coords.js": {
"bytesInOutput": 30878
"bytesInOutput": 30819
},
"src/face/facepipeline.js": {
"bytesInOutput": 9873
"bytesInOutput": 9557
},
"src/face/facemesh.js": {
"bytesInOutput": 1964
"bytesInOutput": 1910
},
"src/profile.js": {
"bytesInOutput": 887
"bytesInOutput": 850
},
"src/age/age.js": {
"bytesInOutput": 1254
"bytesInOutput": 1209
},
"src/gender/gender.js": {
"bytesInOutput": 2133
"bytesInOutput": 2050
},
"src/emotion/emotion.js": {
"bytesInOutput": 1980
"bytesInOutput": 1909
},
"src/embedding/embedding.js": {
"bytesInOutput": 1430
"bytesInOutput": 1394
},
"src/body/modelBase.js": {
"bytesInOutput": 655
"bytesInOutput": 626
},
"src/body/modelMobileNet.js": {
"bytesInOutput": 411
"bytesInOutput": 406
},
"src/body/heapSort.js": {
"bytesInOutput": 1147
"bytesInOutput": 1138
},
"src/body/buildParts.js": {
"bytesInOutput": 1353
"bytesInOutput": 1313
},
"src/body/keypoints.js": {
"bytesInOutput": 1822
"bytesInOutput": 1810
},
"src/body/vectors.js": {
"bytesInOutput": 1095
"bytesInOutput": 1058
},
"src/body/decodePose.js": {
"bytesInOutput": 3229
"bytesInOutput": 3131
},
"src/body/decodeMultiple.js": {
"bytesInOutput": 1732
"bytesInOutput": 1682
},
"src/body/util.js": {
"bytesInOutput": 1958
"bytesInOutput": 1923
},
"src/body/modelPoseNet.js": {
"bytesInOutput": 1673
"bytesInOutput": 1590
},
"src/body/posenet.js": {
"bytesInOutput": 863
"bytesInOutput": 834
},
"src/hand/handdetector.js": {
"bytesInOutput": 3223
"bytesInOutput": 3148
},
"src/hand/handpipeline.js": {
"bytesInOutput": 4903
"bytesInOutput": 4745
},
"src/hand/anchors.js": {
"bytesInOutput": 127037
"bytesInOutput": 127032
},
"src/hand/handpose.js": {
"bytesInOutput": 1840
"bytesInOutput": 1842
},
"src/gesture.js": {
"bytesInOutput": 2308
"bytesInOutput": 2235
},
"src/imagefx.js": {
"bytesInOutput": 13892
"bytesInOutput": 13638
},
"src/image.js": {
"bytesInOutput": 4109
"bytesInOutput": 4026
},
"src/human.js": {
"bytesInOutput": 10916
"bytesInOutput": 10603
},
"dist/tfjs.esm.js": {
"bytesInOutput": 1579532
"bytesInOutput": 1529076
},
"src/hand/box.js": {
"bytesInOutput": 1990
"bytesInOutput": 1894
},
"src/hand/util.js": {
"bytesInOutput": 1860
"bytesInOutput": 1808
},
"config.js": {
"bytesInOutput": 1431
"bytesInOutput": 1368
},
"package.json": {
"bytesInOutput": 21
}
},
"bytes": 1837720
"bytes": 1784570
}
}
}

dist/human.node-nobundle.js: file diff suppressed because one or more lines are too long

dist/human.node-nobundle.js.map: file diff suppressed because one or more lines are too long

dist/human.node-nobundle.json

@@ -5,7 +5,7 @@
"imports": []
},
"dist/tfjs.esm.js": {
"bytes": 1586281,
"bytes": 1537126,
"imports": []
},
"package.json": {
@@ -159,7 +159,7 @@
]
},
"src/face/blazeface.js": {
"bytes": 6942,
"bytes": 6992,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -179,7 +179,7 @@
"imports": []
},
"src/face/facemesh.js": {
"bytes": 2475,
"bytes": 2455,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -196,7 +196,7 @@
]
},
"src/face/facepipeline.js": {
"bytes": 13802,
"bytes": 13867,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -244,7 +244,7 @@
]
},
"src/hand/handdetector.js": {
"bytes": 4235,
"bytes": 4275,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -255,7 +255,7 @@
]
},
"src/hand/handpipeline.js": {
"bytes": 7572,
"bytes": 7607,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -269,7 +269,7 @@
]
},
"src/hand/handpose.js": {
"bytes": 3034,
"bytes": 3074,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -290,7 +290,7 @@
"imports": []
},
"src/human.js": {
"bytes": 15702,
"bytes": 15667,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -357,119 +357,119 @@
"dist/human.node-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 2928376
"bytes": 2604967
},
"dist/human.node-nobundle.js": {
"imports": [],
"exports": [],
"inputs": {
"src/face/blazeface.js": {
"bytesInOutput": 5801
"bytesInOutput": 5588
},
"src/face/box.js": {
"bytesInOutput": 1684
"bytesInOutput": 1607
},
"src/face/util.js": {
"bytesInOutput": 2491
"bytesInOutput": 2434
},
"src/face/coords.js": {
"bytesInOutput": 30889
"bytesInOutput": 30830
},
"src/face/facepipeline.js": {
"bytesInOutput": 9897
"bytesInOutput": 9581
},
"src/face/facemesh.js": {
"bytesInOutput": 1987
"bytesInOutput": 1931
},
"src/profile.js": {
"bytesInOutput": 899
"bytesInOutput": 862
},
"src/age/age.js": {
"bytesInOutput": 1274
"bytesInOutput": 1229
},
"src/gender/gender.js": {
"bytesInOutput": 2160
"bytesInOutput": 2077
},
"src/emotion/emotion.js": {
"bytesInOutput": 2004
"bytesInOutput": 1933
},
"src/embedding/embedding.js": {
"bytesInOutput": 1456
"bytesInOutput": 1420
},
"src/body/modelBase.js": {
"bytesInOutput": 657
"bytesInOutput": 628
},
"src/body/modelMobileNet.js": {
"bytesInOutput": 413
"bytesInOutput": 408
},
"src/body/heapSort.js": {
"bytesInOutput": 1149
"bytesInOutput": 1140
},
"src/body/buildParts.js": {
"bytesInOutput": 1355
"bytesInOutput": 1315
},
"src/body/keypoints.js": {
"bytesInOutput": 1833
"bytesInOutput": 1821
},
"src/body/vectors.js": {
"bytesInOutput": 1103
"bytesInOutput": 1066
},
"src/body/decodePose.js": {
"bytesInOutput": 3231
"bytesInOutput": 3133
},
"src/body/decodeMultiple.js": {
"bytesInOutput": 1734
"bytesInOutput": 1684
},
"src/body/util.js": {
"bytesInOutput": 1966
"bytesInOutput": 1931
},
"src/body/modelPoseNet.js": {
"bytesInOutput": 1689
"bytesInOutput": 1606
},
"src/body/posenet.js": {
"bytesInOutput": 877
"bytesInOutput": 848
},
"src/hand/handdetector.js": {
"bytesInOutput": 3236
"bytesInOutput": 3161
},
"src/hand/handpipeline.js": {
"bytesInOutput": 4914
"bytesInOutput": 4756
},
"src/hand/anchors.js": {
"bytesInOutput": 127039
"bytesInOutput": 127034
},
"src/hand/handpose.js": {
"bytesInOutput": 1858
},
"src/gesture.js": {
"bytesInOutput": 2332
},
"src/imagefx.js": {
"bytesInOutput": 13894
},
"src/image.js": {
"bytesInOutput": 4145
},
"src/human.js": {
"bytesInOutput": 10874
},
"dist/tfjs.esm.js": {
"bytesInOutput": 1580213
},
"src/hand/box.js": {
"bytesInOutput": 1990
},
"src/hand/util.js": {
"bytesInOutput": 1860
},
"src/gesture.js": {
"bytesInOutput": 2259
},
"src/imagefx.js": {
"bytesInOutput": 13640
},
"src/image.js": {
"bytesInOutput": 4063
},
"src/human.js": {
"bytesInOutput": 10578
},
"dist/tfjs.esm.js": {
"bytesInOutput": 1529761
},
"src/hand/box.js": {
"bytesInOutput": 1894
},
"src/hand/util.js": {
"bytesInOutput": 1808
},
"config.js": {
"bytesInOutput": 1431
"bytesInOutput": 1368
},
"package.json": {
"bytesInOutput": 21
}
},
"bytes": 1838736
"bytes": 1785604
}
}
}

591 dist/human.node.js vendored: file diff suppressed because one or more lines are too long

dist/human.node.js.map: file diff suppressed because one or more lines are too long

114 dist/human.node.json vendored

@@ -5,7 +5,7 @@
"imports": []
},
"dist/tfjs.esm.js": {
"bytes": 1586281,
"bytes": 1537126,
"imports": []
},
"package.json": {
@@ -159,7 +159,7 @@
]
},
"src/face/blazeface.js": {
"bytes": 6942,
"bytes": 6992,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -179,7 +179,7 @@
"imports": []
},
"src/face/facemesh.js": {
"bytes": 2475,
"bytes": 2455,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -196,7 +196,7 @@
]
},
"src/face/facepipeline.js": {
"bytes": 13802,
"bytes": 13867,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -244,7 +244,7 @@
]
},
"src/hand/handdetector.js": {
"bytes": 4235,
"bytes": 4275,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -255,7 +255,7 @@
]
},
"src/hand/handpipeline.js": {
"bytes": 7572,
"bytes": 7607,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -269,7 +269,7 @@
]
},
"src/hand/handpose.js": {
"bytes": 3034,
"bytes": 3074,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -290,7 +290,7 @@
"imports": []
},
"src/human.js": {
"bytes": 15702,
"bytes": 15667,
"imports": [
{
"path": "dist/tfjs.esm.js"
@@ -357,119 +357,119 @@
"dist/human.node.js.map": {
"imports": [],
"inputs": {},
"bytes": 2928376
"bytes": 2604967
},
"dist/human.node.js": {
"imports": [],
"exports": [],
"inputs": {
"src/face/blazeface.js": {
"bytesInOutput": 5801
"bytesInOutput": 5588
},
"src/face/box.js": {
"bytesInOutput": 1684
"bytesInOutput": 1607
},
"src/face/util.js": {
"bytesInOutput": 2491
"bytesInOutput": 2434
},
"src/face/coords.js": {
"bytesInOutput": 30889
"bytesInOutput": 30830
},
"src/face/facepipeline.js": {
"bytesInOutput": 9897
"bytesInOutput": 9581
},
"src/face/facemesh.js": {
"bytesInOutput": 1987
"bytesInOutput": 1931
},
"src/profile.js": {
"bytesInOutput": 899
"bytesInOutput": 862
},
"src/age/age.js": {
"bytesInOutput": 1274
"bytesInOutput": 1229
},
"src/gender/gender.js": {
"bytesInOutput": 2160
"bytesInOutput": 2077
},
"src/emotion/emotion.js": {
"bytesInOutput": 2004
"bytesInOutput": 1933
},
"src/embedding/embedding.js": {
"bytesInOutput": 1456
"bytesInOutput": 1420
},
"src/body/modelBase.js": {
"bytesInOutput": 657
"bytesInOutput": 628
},
"src/body/modelMobileNet.js": {
"bytesInOutput": 413
"bytesInOutput": 408
},
"src/body/heapSort.js": {
"bytesInOutput": 1149
"bytesInOutput": 1140
},
"src/body/buildParts.js": {
"bytesInOutput": 1355
"bytesInOutput": 1315
},
"src/body/keypoints.js": {
"bytesInOutput": 1833
"bytesInOutput": 1821
},
"src/body/vectors.js": {
"bytesInOutput": 1103
"bytesInOutput": 1066
},
"src/body/decodePose.js": {
"bytesInOutput": 3231
"bytesInOutput": 3133
},
"src/body/decodeMultiple.js": {
"bytesInOutput": 1734
"bytesInOutput": 1684
},
"src/body/util.js": {
"bytesInOutput": 1966
"bytesInOutput": 1931
},
"src/body/modelPoseNet.js": {
"bytesInOutput": 1689
"bytesInOutput": 1606
},
"src/body/posenet.js": {
"bytesInOutput": 877
"bytesInOutput": 848
},
"src/hand/handdetector.js": {
"bytesInOutput": 3236
"bytesInOutput": 3161
},
"src/hand/handpipeline.js": {
"bytesInOutput": 4914
"bytesInOutput": 4756
},
"src/hand/anchors.js": {
"bytesInOutput": 127039
"bytesInOutput": 127034
},
"src/hand/handpose.js": {
"bytesInOutput": 1858
},
"src/gesture.js": {
"bytesInOutput": 2332
},
"src/imagefx.js": {
"bytesInOutput": 13894
},
"src/image.js": {
"bytesInOutput": 4145
},
"src/human.js": {
"bytesInOutput": 10874
},
"dist/tfjs.esm.js": {
"bytesInOutput": 1580213
},
"src/hand/box.js": {
"bytesInOutput": 1990
},
"src/hand/util.js": {
"bytesInOutput": 1860
},
"src/gesture.js": {
"bytesInOutput": 2259
},
"src/imagefx.js": {
"bytesInOutput": 13640
},
"src/image.js": {
"bytesInOutput": 4063
},
"src/human.js": {
"bytesInOutput": 10578
},
"dist/tfjs.esm.js": {
"bytesInOutput": 1529761
},
"src/hand/box.js": {
"bytesInOutput": 1894
},
"src/hand/util.js": {
"bytesInOutput": 1808
},
"config.js": {
"bytesInOutput": 1431
"bytesInOutput": 1368
},
"package.json": {
"bytesInOutput": 21
}
},
"bytes": 1838727
"bytes": 1785595
}
}
}

584 dist/tfjs.esm.js vendored: file diff suppressed because one or more lines are too long

dist/tfjs.esm.js.map: file diff suppressed because one or more lines are too long

1884 dist/tfjs.esm.json vendored: file diff suppressed because it is too large

build script

@@ -10,6 +10,7 @@ let es;
 // common configuration
 const common = {
   minifyWhitespace: true,
+  minifySyntax: true,
   bundle: true,
   sourcemap: true,
   logLevel: 'error',

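The single added line above, minifySyntax: true, accounts for the across-the-board byte reductions in the manifests (dist/tfjs.esm.js drops from 1586281 to 1537126 bytes, dist/human.esm.js from 1838222 to 1785092): esbuild now rewrites code into shorter equivalent syntax on top of the whitespace stripping already enabled. A sketch of how such a shared options object is typically spread into individual build targets (the target list itself is not part of this hunk and the names here are illustrative):

const esbuild = require('esbuild');
const common = { minifyWhitespace: true, minifySyntax: true, bundle: true, sourcemap: true, logLevel: 'error' };

esbuild.build({ ...common, entryPoints: ['src/human.js'], outfile: 'dist/human.esm.js', format: 'esm' })
  .catch(() => process.exit(1));
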
src/face/blazeface.js

@@ -67,9 +67,9 @@ function scaleBoxFromPrediction(face, scaleFactor) {
 class BlazeFaceModel {
   constructor(model, config) {
     this.blazeFaceModel = model;
-    this.width = config.detector.inputSize;
-    this.height = config.detector.inputSize;
-    this.anchorsData = generateAnchors(config.detector.inputSize);
+    this.width = config.face.detector.inputSize;
+    this.height = config.face.detector.inputSize;
+    this.anchorsData = generateAnchors(config.face.detector.inputSize);
     this.anchors = tf.tensor2d(this.anchorsData);
     this.inputSize = tf.tensor1d([this.width, this.height]);
     this.config = config;
@@ -100,7 +100,7 @@ class BlazeFaceModel {
       const scoresOut = tf.sigmoid(logits).squeeze();
       return [prediction, decodedBounds, scoresOut];
     });
-    const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);
+    const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.face.detector.maxFaces, this.config.face.detector.iouThreshold, this.config.face.detector.scoreThreshold);
     const boxIndices = boxIndicesTensor.arraySync();
     boxIndicesTensor.dispose();
     const boundingBoxesMap = boxIndices.map((boxIndex) => tf.slice(boxes, [boxIndex, 0], [1, -1]));
@@ -115,7 +115,7 @@
     for (const i in boundingBoxes) {
       const boxIndex = boxIndices[i];
       const confidence = scoresVal[boxIndex];
-      if (confidence > this.config.detector.minConfidence) {
+      if (confidence > this.config.face.detector.minConfidence) {
         const box = createBox(boundingBoxes[i]);
         const anchor = this.anchorsData[boxIndex];
         const landmarks = tf.tidy(() => tf.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]).squeeze().reshape([NUM_LANDMARKS, -1]));
@@ -164,10 +164,10 @@
 }
 async function load(config) {
-  const blazeface = await tf.loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') });
+  const blazeface = await tf.loadGraphModel(config.face.detector.modelPath, { fromTFHub: config.face.detector.modelPath.includes('tfhub.dev') });
   const model = new BlazeFaceModel(blazeface, config);
   // eslint-disable-next-line no-console
-  console.log(`Human: load model: ${config.detector.modelPath.match(/\/(.*)\./)[1]}`);
+  console.log(`Human: load model: ${config.face.detector.modelPath.match(/\/(.*)\./)[1]}`);
   return model;
 }

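Every source hunk from here on performs the same mechanical rewrite: config.detector.*, config.mesh.*, config.iris.*, config.skeleton.* and the bare hand options become fully qualified paths under config.face.* and config.hand.*. The nested shape those paths imply looks roughly like this (key names are taken from the hunks; the values are placeholders, not the library's defaults):

const config = {
  videoOptimized: true, // top-level flag the pipelines can now actually see
  face: {
    enabled: true,
    detector: { modelPath: '...', inputSize: 128, maxFaces: 10, skipFrames: 15, minConfidence: 0.5, iouThreshold: 0.3, scoreThreshold: 0.7, rotation: false },
    mesh: { enabled: true, modelPath: '...', inputSize: 192 },
    iris: { enabled: true, modelPath: '...', inputSize: 64 },
  },
  hand: {
    enabled: true,
    inputSize: 256,
    skipFrames: 15,
    landmarks: true,
    maxHands: 1,
    minConfidence: 0.5,
    iouThreshold: 0.3,
    scoreThreshold: 0.7,
    detector: { modelPath: '...' },
    skeleton: { modelPath: '...' },
  },
};
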
src/face/facemesh.js

@@ -6,11 +6,10 @@ import * as coords from './coords.js';
 class MediaPipeFaceMesh {
   constructor(blazeFace, blazeMeshModel, irisModel, config) {
     this.pipeline = new pipe.Pipeline(blazeFace, blazeMeshModel, irisModel, config);
-    if (config) this.config = config;
+    this.config = config;
   }
   async estimateFaces(input, config) {
-    if (config) this.config = config;
     const predictions = await this.pipeline.predict(input, config);
     const results = [];
     for (const prediction of (predictions || [])) {
@@ -20,7 +19,7 @@
       const annotations = {};
       if (mesh && mesh.length > 0) {
         for (const key in coords.MESH_ANNOTATIONS) {
-          if (this.config.iris.enabled || key.includes('Iris') === false) {
+          if (config.face.iris.enabled || key.includes('Iris') === false) {
             annotations[key] = coords.MESH_ANNOTATIONS[key].map((index) => mesh[index]);
           }
         }
@@ -42,14 +41,14 @@
 async function load(config) {
   const models = await Promise.all([
     blazeface.load(config),
-    tf.loadGraphModel(config.mesh.modelPath, { fromTFHub: config.mesh.modelPath.includes('tfhub.dev') }),
-    tf.loadGraphModel(config.iris.modelPath, { fromTFHub: config.iris.modelPath.includes('tfhub.dev') }),
+    tf.loadGraphModel(config.face.mesh.modelPath, { fromTFHub: config.face.mesh.modelPath.includes('tfhub.dev') }),
+    tf.loadGraphModel(config.face.iris.modelPath, { fromTFHub: config.face.iris.modelPath.includes('tfhub.dev') }),
   ]);
   const faceMesh = new MediaPipeFaceMesh(models[0], models[1], models[2], config);
   // eslint-disable-next-line no-console
-  console.log(`Human: load model: ${config.mesh.modelPath.match(/\/(.*)\./)[1]}`);
+  console.log(`Human: load model: ${config.face.mesh.modelPath.match(/\/(.*)\./)[1]}`);
   // eslint-disable-next-line no-console
-  console.log(`Human: load model: ${config.iris.modelPath.match(/\/(.*)\./)[1]}`);
+  console.log(`Human: load model: ${config.face.iris.modelPath.match(/\/(.*)\./)[1]}`);
   return faceMesh;
 }

src/face/facepipeline.js

@@ -45,9 +45,9 @@ class Pipeline {
     this.boundingBoxDetector = boundingBoxDetector;
     this.meshDetector = meshDetector;
     this.irisModel = irisModel;
-    this.meshWidth = config.mesh.inputSize;
-    this.meshHeight = config.mesh.inputSize;
-    this.irisSize = config.iris.inputSize;
+    this.meshWidth = config.face.mesh.inputSize;
+    this.meshHeight = config.face.mesh.inputSize;
+    this.irisSize = config.face.iris.inputSize;
     this.irisEnlarge = 2.3;
     this.skipped = 1000;
     this.detectedFaces = 0;
@@ -134,14 +134,14 @@
     let useFreshBox = false;
     // run new detector every skipFrames unless we only want box to start with
     let detector;
-    if ((this.skipped > config.detector.skipFrames) || !config.mesh.enabled || !config.videoOptimized) {
+    if ((this.skipped > config.face.detector.skipFrames) || !config.face.mesh.enabled || !config.videoOptimized) {
       detector = await this.boundingBoxDetector.getBoundingBoxes(input);
       // don't reset on test image
       if ((input.shape[1] !== 255) && (input.shape[2] !== 255)) this.skipped = 0;
     }
     // if detector result count doesn't match current working set, use it to reset current working set
-    if (detector && detector.boxes && (detector.boxes.length > 0) && (!config.mesh.enabled || (detector.boxes.length !== this.detectedFaces) && (this.detectedFaces !== config.detector.maxFaces))) {
+    if (detector && detector.boxes && (detector.boxes.length > 0) && (!config.face.mesh.enabled || (detector.boxes.length !== this.detectedFaces) && (this.detectedFaces !== config.face.detector.maxFaces))) {
       this.storedBoxes = [];
       this.detectedFaces = 0;
       for (const possible of detector.boxes) {
@@ -173,7 +173,7 @@
       });
     }
-    // console.log(this.skipped, config.detector.skipFrames, this.detectedFaces, config.detector.maxFaces, detector?.boxes.length, this.storedBoxes.length);
+    // console.log(this.skipped, config.face.detector.skipFrames, this.detectedFaces, config.face.detector.maxFaces, detector?.boxes.length, this.storedBoxes.length);
     let results = tf.tidy(() => this.storedBoxes.map((box, i) => {
       let angle = 0;
@@ -193,10 +193,10 @@
         rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);
       }
       const face = bounding.cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);
-      const outputFace = config.detector.rotation ? tf.image.rotateWithOffset(face, angle) : face;
+      const outputFace = config.face.detector.rotation ? tf.image.rotateWithOffset(face, angle) : face;
       // if we're not going to produce mesh, don't spend time with further processing
-      if (!config.mesh.enabled) {
+      if (!config.face.mesh.enabled) {
        const prediction = {
          coords: null,
          box,
@@ -211,13 +211,13 @@
      const [, confidence, contourCoords] = this.meshDetector.predict(face);
      const confidenceVal = confidence.dataSync()[0];
      confidence.dispose();
-      if (confidenceVal < config.detector.minConfidence) {
+      if (confidenceVal < config.face.detector.minConfidence) {
        contourCoords.dispose();
        return null;
      }
      const coordsReshaped = tf.reshape(contourCoords, [-1, 3]);
      let rawCoords = coordsReshaped.arraySync();
-      if (config.iris.enabled) {
+      if (config.face.iris.enabled) {
        const { box: leftEyeBox, boxSize: leftEyeBoxSize, crop: leftEyeCrop } = this.getEyeBox(rawCoords, face, LEFT_EYE_BOUNDS[0], LEFT_EYE_BOUNDS[1], true);
        const { box: rightEyeBox, boxSize: rightEyeBoxSize, crop: rightEyeCrop } = this.getEyeBox(rawCoords, face, RIGHT_EYE_BOUNDS[0], RIGHT_EYE_BOUNDS[1]);
        const eyePredictions = (this.irisModel.predict(tf.concat([leftEyeCrop, rightEyeCrop])));

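The guard in the hunk above is the heart of the performance fix, and handpipeline below repeats the same pattern for hands: run the heavy bounding-box detector only when enough frames have been skipped, when mesh refinement is disabled, or when the input is not an optimized video stream; otherwise keep refining the boxes cached from the last detector run. A condensed restatement with the corrected paths (the real class does considerably more bookkeeping):

class Pipeline {
  constructor(boundingBoxDetector) {
    this.boundingBoxDetector = boundingBoxDetector;
    this.skipped = 1000;   // start high so the first frame always runs the detector
    this.storedBoxes = []; // working set reused between detector runs
  }
  async predict(input, config) {
    this.skipped++;
    let detector;
    if ((this.skipped > config.face.detector.skipFrames) || !config.face.mesh.enabled || !config.videoOptimized) {
      detector = await this.boundingBoxDetector.getBoundingBoxes(input); // expensive path
      this.skipped = 0; // the real code skips this reset for the 255x255 warm-up image
    }
    if (detector && detector.boxes && detector.boxes.length > 0) this.storedBoxes = detector.boxes;
    // cheap path: boxes stored from the last detector run feed the mesh model directly
    return this.storedBoxes;
  }
}
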
src/hand/handdetector.js

@@ -55,14 +55,14 @@ class HandDetector {
     const rawBoxes = tf.slice(predictions, [0, 1], [-1, 4]);
     const boxes = this.normalizeBoxes(rawBoxes);
     rawBoxes.dispose();
-    const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.maxHands, config.iouThreshold, config.scoreThreshold);
+    const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.hand.maxHands, config.hand.iouThreshold, config.hand.scoreThreshold);
     const filtered = filteredT.arraySync();
     scores.dispose();
     filteredT.dispose();
     const hands = [];
     for (const boxIndex of filtered) {
-      if (scoresVal[boxIndex] >= config.minConfidence) {
+      if (scoresVal[boxIndex] >= config.hand.minConfidence) {
         const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);
         const rawPalmLandmarks = tf.slice(predictions, [boxIndex, 5], [1, 14]);
         const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));
@@ -78,7 +78,7 @@
   async estimateHandBounds(input, config) {
     const inputHeight = input.shape[1];
     const inputWidth = input.shape[2];
-    const image = tf.tidy(() => input.resizeBilinear([config.inputSize, config.inputSize]).div(127.5).sub(1));
+    const image = tf.tidy(() => input.resizeBilinear([config.hand.inputSize, config.hand.inputSize]).div(127.5).sub(1));
     const predictions = await this.getBoxes(image, config);
     image.dispose();
     if (!predictions || predictions.length === 0) return null;
@@ -90,7 +90,7 @@
       const palmLandmarks = prediction.palmLandmarks.arraySync();
       prediction.box.dispose();
       prediction.palmLandmarks.dispose();
-      hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / config.inputSize, inputHeight / config.inputSize]));
+      hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / config.hand.inputSize, inputHeight / config.hand.inputSize]));
     }
     return hands;
   }

src/hand/handpipeline.js

@@ -89,27 +89,27 @@ class HandPipeline {
     // run new detector every skipFrames unless we only want box to start with
     let boxes;
-    if ((this.skipped > config.skipFrames) || !config.landmarks || !config.videoOptimized) {
+    if ((this.skipped > config.hand.skipFrames) || !config.hand.landmarks || !config.videoOptimized) {
       boxes = await this.boxDetector.estimateHandBounds(image, config);
       // don't reset on test image
       if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;
     }
     // if detector result count doesn't match current working set, use it to reset current working set
-    if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.maxHands) || !config.landmarks)) {
+    if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.hand.maxHands) || !config.hand.landmarks)) {
       this.storedBoxes = [];
       this.detectedHands = 0;
       for (const possible of boxes) this.storedBoxes.push(possible);
       if (this.storedBoxes.length > 0) useFreshBox = true;
     }
     const hands = [];
-    // console.log(`skipped: ${this.skipped} max: ${config.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);
+    // console.log(`skipped: ${this.skipped} max: ${config.hand.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);
     // go through working set of boxes
     for (const i in this.storedBoxes) {
       const currentBox = this.storedBoxes[i];
       if (!currentBox) continue;
-      if (config.landmarks) {
+      if (config.hand.landmarks) {
         const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
         const palmCenter = box.getBoxCenter(currentBox);
         const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];
@@ -124,7 +124,7 @@
         handImage.dispose();
         const confidenceValue = confidence.dataSync()[0];
         confidence.dispose();
-        if (confidenceValue >= config.minConfidence) {
+        if (confidenceValue >= config.hand.minConfidence) {
           const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
           const rawCoords = keypointsReshaped.arraySync();
           keypoints.dispose();

src/hand/handpose.js

@@ -69,16 +69,16 @@ exports.HandPose = HandPose;
 async function load(config) {
   const [handDetectorModel, handPoseModel] = await Promise.all([
-    tf.loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') }),
-    tf.loadGraphModel(config.skeleton.modelPath, { fromTFHub: config.skeleton.modelPath.includes('tfhub.dev') }),
+    tf.loadGraphModel(config.hand.detector.modelPath, { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }),
+    tf.loadGraphModel(config.hand.skeleton.modelPath, { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }),
   ]);
-  const detector = new handdetector.HandDetector(handDetectorModel, config.inputSize, anchors.anchors);
-  const pipe = new pipeline.HandPipeline(detector, handPoseModel, config.inputSize);
+  const detector = new handdetector.HandDetector(handDetectorModel, config.hand.inputSize, anchors.anchors);
+  const pipe = new pipeline.HandPipeline(detector, handPoseModel, config.hand.inputSize);
   const handpose = new HandPose(pipe);
   // eslint-disable-next-line no-console
-  console.log(`Human: load model: ${config.detector.modelPath.match(/\/(.*)\./)[1]}`);
+  console.log(`Human: load model: ${config.hand.detector.modelPath.match(/\/(.*)\./)[1]}`);
   // eslint-disable-next-line no-console
-  console.log(`Human: load model: ${config.skeleton.modelPath.match(/\/(.*)\./)[1]}`);
+  console.log(`Human: load model: ${config.hand.skeleton.modelPath.match(/\/(.*)\./)[1]}`);
   return handpose;
 }
 exports.load = load;

src/human.js

@@ -132,22 +132,22 @@ class Human {
       this.models.posenet,
       this.models.handpose,
     ] = await Promise.all([
-      this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),
+      this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config) : null),
       this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
       this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
       this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
       this.models.embedding || ((this.config.face.enabled && this.config.face.embedding.enabled) ? embedding.load(this.config) : null),
       this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
-      this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null),
+      this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
     ]);
   } else {
-    if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face);
+    if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config);
     if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
     if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
     if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
     if (this.config.face.enabled && this.config.face.embedding.enabled && !this.models.embedding) this.models.embedding = await embedding.load(this.config);
     if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);
-    if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config.hand);
+    if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config);
   }
   const current = Math.trunc(now() - timeStamp);
   if (current > (this.perf.load || 0)) this.perf.load = current;
@@ -207,7 +207,7 @@
     const faceRes = [];
     this.state = 'run:face';
     timeStamp = now();
-    const faces = await this.models.facemesh.estimateFaces(input, this.config.face);
+    const faces = await this.models.facemesh.estimateFaces(input, this.config);
     this.perf.face = Math.trunc(now() - timeStamp);
     for (const face of faces) {
       this.analyze('Get Face');
@@ -378,12 +378,12 @@
     // run handpose
     this.analyze('Start Hand:');
     if (this.config.async) {
-      handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];
+      handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config) : [];
       if (this.perf.hand) delete this.perf.hand;
     } else {
       this.state = 'run:hand';
       timeStamp = now();
-      handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];
+      handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(process.tensor, this.config) : [];
       this.perf.hand = Math.trunc(now() - timeStamp);
     }
     // this.analyze('End Hand:');
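
Taken together, load() and detect() now both receive the complete configuration, and the demo passes its userConfig per call. A minimal end-to-end sketch of the resulting usage (package and export names as published; everything beyond videoOptimized is left to the defaults in config.js):

import Human from '@vladmandic/human'; // assumption: consuming the published ESM build

const human = new Human();

async function run(videoElement) {
  // the per-call config is merged over the defaults, so videoOptimized
  // is visible to the face and hand pipelines all the way down
  const result = await human.detect(videoElement, { videoOptimized: true });
  console.log(result.face, result.hand, result.body, result.gesture);
}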