mirror of https://github.com/vladmandic/human
redo hand detection

parent ced772bb43
commit b8c9687b29
File diff suppressed because one or more lines are too long

@@ -16,7 +16,6 @@
<!-- <script src="../assets/tf.es2017.js"></script> -->
<!-- <script src="../assets/tf-backend-wasm.es2017.js"></script> -->
<!-- <script src="../assets/tf-backend-webgpu.js"></script> -->
<!-- <script src='../assets/tfjs-vis.min.js'></script> -->
<script src="./browser.js" type="module"></script>
<style>
body { margin: 0; background: black; color: white; font-family: 'Segoe UI'; font-size: 16px; font-variant: small-caps; overflow-x: hidden; scrollbar-width: none; }
File diff suppressed because one or more lines are too long (2 files)

dist/human.esm-nobundle.json
@@ -25,7 +25,7 @@
"imports": []
},
"src/facemesh/facemesh.js": {
"bytes": 2568,
"bytes": 2572,
"imports": [
{
"path": "src/facemesh/blazeface.js"
@@ -34,7 +34,7 @@
"path": "src/facemesh/keypoints.js"
},
{
"path": "src/facemesh/pipeline.js"
"path": "src/facemesh/facepipeline.js"
},
{
"path": "src/facemesh/uvcoords.js"
@@ -44,12 +44,8 @@
}
]
},
"src/facemesh/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/pipeline.js": {
"bytes": 14262,
"src/facemesh/facepipeline.js": {
"bytes": 14296,
"imports": [
{
"path": "src/facemesh/box.js"
@@ -62,6 +58,10 @@
}
]
},
"src/facemesh/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/triangulation.js": {
"bytes": 12940,
"imports": []
@@ -79,40 +79,19 @@
"imports": []
},
"src/handpose/box.js": {
"bytes": 2572,
"bytes": 3192,
"imports": []
},
"src/handpose/handdetector.js": {
"bytes": 4077,
"bytes": 4484,
"imports": [
{
"path": "src/handpose/box.js"
}
]
},
"src/handpose/handpose.js": {
"bytes": 1849,
"imports": [
{
"path": "src/handpose/handdetector.js"
},
{
"path": "src/handpose/keypoints.js"
},
{
"path": "src/handpose/pipeline.js"
},
{
"path": "src/handpose/anchors.js"
}
]
},
"src/handpose/keypoints.js": {
"bytes": 193,
"imports": []
},
"src/handpose/pipeline.js": {
"bytes": 8216,
"src/handpose/handpipeline.js": {
"bytes": 8058,
"imports": [
{
"path": "src/handpose/box.js"
@@ -122,12 +101,26 @@
}
]
},
"src/handpose/handpose.js": {
"bytes": 2772,
"imports": [
{
"path": "src/handpose/handdetector.js"
},
{
"path": "src/handpose/handpipeline.js"
},
{
"path": "src/handpose/anchors.js"
}
]
},
"src/handpose/util.js": {
"bytes": 2488,
"bytes": 3030,
"imports": []
},
"src/human.js": {
"bytes": 15164,
"bytes": 15209,
"imports": [
{
"path": "src/facemesh/facemesh.js"
@@ -282,7 +275,7 @@
"dist/human.esm-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 605307
"bytes": 607262
},
"dist/human.esm-nobundle.js": {
"imports": [],
@@ -299,8 +292,8 @@
"src/facemesh/util.js": {
"bytesInOutput": 1171
},
"src/facemesh/pipeline.js": {
"bytesInOutput": 5571
"src/facemesh/facepipeline.js": {
"bytesInOutput": 5585
},
"src/facemesh/uvcoords.js": {
"bytesInOutput": 16785
@@ -318,7 +311,7 @@
"bytesInOutput": 1236
},
"src/emotion/emotion.js": {
"bytesInOutput": 1104
"bytesInOutput": 1098
},
"src/posenet/modelBase.js": {
"bytesInOutput": 455
@@ -333,46 +326,43 @@
"bytesInOutput": 546
},
"src/posenet/keypoints.js": {
"bytesInOutput": 1621
"bytesInOutput": 1620
},
"src/posenet/vectors.js": {
"bytesInOutput": 607
"bytesInOutput": 606
},
"src/posenet/decodePose.js": {
"bytesInOutput": 1016
"bytesInOutput": 1015
},
"src/posenet/decodeMultiple.js": {
"bytesInOutput": 603
},
"src/posenet/util.js": {
"bytesInOutput": 1053
"bytesInOutput": 1052
},
"src/posenet/modelPoseNet.js": {
"bytesInOutput": 841
},
"src/posenet/posenet.js": {
"bytesInOutput": 459
"bytesInOutput": 458
},
"src/handpose/box.js": {
"bytesInOutput": 1376
"bytesInOutput": 1420
},
"src/handpose/handdetector.js": {
"bytesInOutput": 2004
},
"src/handpose/keypoints.js": {
"bytesInOutput": 155
"bytesInOutput": 1891
},
"src/handpose/util.js": {
"bytesInOutput": 972
"bytesInOutput": 997
},
"src/handpose/pipeline.js": {
"bytesInOutput": 3223
"src/handpose/handpipeline.js": {
"bytesInOutput": 3233
},
"src/handpose/anchors.js": {
"bytesInOutput": 127000
},
"src/handpose/handpose.js": {
"bytesInOutput": 1014
"bytesInOutput": 1112
},
"src/imagefx.js": {
"bytesInOutput": 11013
@@ -384,13 +374,13 @@
"bytesInOutput": 2567
},
"src/human.js": {
"bytesInOutput": 8439
"bytesInOutput": 8467
},
"src/human.js": {
"bytesInOutput": 0
}
},
"bytes": 211614
"bytes": 212257
}
}
}
File diff suppressed because one or more lines are too long (2 files)

dist/human.esm.json
@@ -180,7 +180,7 @@
]
},
"src/facemesh/facemesh.js": {
"bytes": 2568,
"bytes": 2572,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -192,7 +192,7 @@
"path": "src/facemesh/keypoints.js"
},
{
"path": "src/facemesh/pipeline.js"
"path": "src/facemesh/facepipeline.js"
},
{
"path": "src/facemesh/uvcoords.js"
@@ -202,12 +202,8 @@
}
]
},
"src/facemesh/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/pipeline.js": {
"bytes": 14262,
"src/facemesh/facepipeline.js": {
"bytes": 14296,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -223,6 +219,10 @@
}
]
},
"src/facemesh/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/triangulation.js": {
"bytes": 12940,
"imports": []
@@ -240,7 +240,7 @@
"imports": []
},
"src/handpose/box.js": {
"bytes": 2572,
"bytes": 3192,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -248,7 +248,7 @@
]
},
"src/handpose/handdetector.js": {
"bytes": 4077,
"bytes": 4484,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -258,32 +258,8 @@
}
]
},
"src/handpose/handpose.js": {
"bytes": 1849,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/handdetector.js"
},
{
"path": "src/handpose/keypoints.js"
},
{
"path": "src/handpose/pipeline.js"
},
{
"path": "src/handpose/anchors.js"
}
]
},
"src/handpose/keypoints.js": {
"bytes": 193,
"imports": []
},
"src/handpose/pipeline.js": {
"bytes": 8216,
"src/handpose/handpipeline.js": {
"bytes": 8058,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -296,12 +272,29 @@
}
]
},
"src/handpose/handpose.js": {
"bytes": 2772,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/handdetector.js"
},
{
"path": "src/handpose/handpipeline.js"
},
{
"path": "src/handpose/anchors.js"
}
]
},
"src/handpose/util.js": {
"bytes": 2488,
"bytes": 3030,
"imports": []
},
"src/human.js": {
"bytes": 15164,
"bytes": 15209,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -488,7 +481,7 @@
"dist/human.esm.js.map": {
"imports": [],
"inputs": {},
"bytes": 5401089
"bytes": 5403044
},
"dist/human.esm.js": {
"imports": [],
@@ -562,8 +555,8 @@
"src/facemesh/util.js": {
"bytesInOutput": 1190
},
"src/facemesh/pipeline.js": {
"bytesInOutput": 5563
"src/facemesh/facepipeline.js": {
"bytesInOutput": 5577
},
"src/facemesh/uvcoords.js": {
"bytesInOutput": 16786
@@ -617,25 +610,22 @@
"bytesInOutput": 474
},
"src/handpose/box.js": {
"bytesInOutput": 1362
"bytesInOutput": 1398
},
"src/handpose/handdetector.js": {
"bytesInOutput": 2010
},
"src/handpose/keypoints.js": {
"bytesInOutput": 156
"bytesInOutput": 1900
},
"src/handpose/util.js": {
"bytesInOutput": 988
"bytesInOutput": 1005
},
"src/handpose/pipeline.js": {
"bytesInOutput": 3221
"src/handpose/handpipeline.js": {
"bytesInOutput": 3231
},
"src/handpose/anchors.js": {
"bytesInOutput": 127001
},
"src/handpose/handpose.js": {
"bytesInOutput": 992
"bytesInOutput": 1090
},
"src/imagefx.js": {
"bytesInOutput": 11014
@@ -647,13 +637,13 @@
"bytesInOutput": 2568
},
"src/human.js": {
"bytesInOutput": 8455
"bytesInOutput": 8484
},
"src/human.js": {
"bytesInOutput": 0
}
},
"bytes": 1273160
"bytes": 1273801
}
}
}
File diff suppressed because one or more lines are too long (2 files)

dist/human.json
@@ -180,7 +180,7 @@
]
},
"src/facemesh/facemesh.js": {
"bytes": 2568,
"bytes": 2572,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -192,7 +192,7 @@
"path": "src/facemesh/keypoints.js"
},
{
"path": "src/facemesh/pipeline.js"
"path": "src/facemesh/facepipeline.js"
},
{
"path": "src/facemesh/uvcoords.js"
@@ -202,12 +202,8 @@
}
]
},
"src/facemesh/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/pipeline.js": {
"bytes": 14262,
"src/facemesh/facepipeline.js": {
"bytes": 14296,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -223,6 +219,10 @@
}
]
},
"src/facemesh/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/triangulation.js": {
"bytes": 12940,
"imports": []
@@ -240,7 +240,7 @@
"imports": []
},
"src/handpose/box.js": {
"bytes": 2572,
"bytes": 3192,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -248,7 +248,7 @@
]
},
"src/handpose/handdetector.js": {
"bytes": 4077,
"bytes": 4484,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -258,32 +258,8 @@
}
]
},
"src/handpose/handpose.js": {
"bytes": 1849,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/handdetector.js"
},
{
"path": "src/handpose/keypoints.js"
},
{
"path": "src/handpose/pipeline.js"
},
{
"path": "src/handpose/anchors.js"
}
]
},
"src/handpose/keypoints.js": {
"bytes": 193,
"imports": []
},
"src/handpose/pipeline.js": {
"bytes": 8216,
"src/handpose/handpipeline.js": {
"bytes": 8058,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -296,12 +272,29 @@
}
]
},
"src/handpose/handpose.js": {
"bytes": 2772,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/handdetector.js"
},
{
"path": "src/handpose/handpipeline.js"
},
{
"path": "src/handpose/anchors.js"
}
]
},
"src/handpose/util.js": {
"bytes": 2488,
"bytes": 3030,
"imports": []
},
"src/human.js": {
"bytes": 15164,
"bytes": 15209,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -488,7 +481,7 @@
"dist/human.js.map": {
"imports": [],
"inputs": {},
"bytes": 5401085
"bytes": 5403040
},
"dist/human.js": {
"imports": [],
@@ -562,8 +555,8 @@
"src/facemesh/util.js": {
"bytesInOutput": 1190
},
"src/facemesh/pipeline.js": {
"bytesInOutput": 5563
"src/facemesh/facepipeline.js": {
"bytesInOutput": 5577
},
"src/facemesh/uvcoords.js": {
"bytesInOutput": 16786
@@ -617,25 +610,22 @@
"bytesInOutput": 474
},
"src/handpose/box.js": {
"bytesInOutput": 1362
"bytesInOutput": 1398
},
"src/handpose/handdetector.js": {
"bytesInOutput": 2010
},
"src/handpose/keypoints.js": {
"bytesInOutput": 156
"bytesInOutput": 1900
},
"src/handpose/util.js": {
"bytesInOutput": 988
"bytesInOutput": 1005
},
"src/handpose/pipeline.js": {
"bytesInOutput": 3221
"src/handpose/handpipeline.js": {
"bytesInOutput": 3231
},
"src/handpose/anchors.js": {
"bytesInOutput": 127001
},
"src/handpose/handpose.js": {
"bytesInOutput": 992
"bytesInOutput": 1090
},
"src/imagefx.js": {
"bytesInOutput": 11014
@@ -647,10 +637,10 @@
"bytesInOutput": 2567
},
"src/human.js": {
"bytesInOutput": 8493
"bytesInOutput": 8522
}
},
"bytes": 1273205
"bytes": 1273846
}
}
}
File diff suppressed because one or more lines are too long (4 files)

dist/human.node-nobundle.json
@@ -25,7 +25,7 @@
"imports": []
},
"src/facemesh/facemesh.js": {
"bytes": 2568,
"bytes": 2572,
"imports": [
{
"path": "src/facemesh/blazeface.js"
@@ -34,7 +34,7 @@
"path": "src/facemesh/keypoints.js"
},
{
"path": "src/facemesh/pipeline.js"
"path": "src/facemesh/facepipeline.js"
},
{
"path": "src/facemesh/uvcoords.js"
@@ -44,12 +44,8 @@
}
]
},
"src/facemesh/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/pipeline.js": {
"bytes": 14262,
"src/facemesh/facepipeline.js": {
"bytes": 14296,
"imports": [
{
"path": "src/facemesh/box.js"
@@ -62,6 +58,10 @@
}
]
},
"src/facemesh/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/triangulation.js": {
"bytes": 12940,
"imports": []
@@ -79,40 +79,19 @@
"imports": []
},
"src/handpose/box.js": {
"bytes": 2572,
"bytes": 3192,
"imports": []
},
"src/handpose/handdetector.js": {
"bytes": 4077,
"bytes": 4484,
"imports": [
{
"path": "src/handpose/box.js"
}
]
},
"src/handpose/handpose.js": {
"bytes": 1849,
"imports": [
{
"path": "src/handpose/handdetector.js"
},
{
"path": "src/handpose/keypoints.js"
},
{
"path": "src/handpose/pipeline.js"
},
{
"path": "src/handpose/anchors.js"
}
]
},
"src/handpose/keypoints.js": {
"bytes": 193,
"imports": []
},
"src/handpose/pipeline.js": {
"bytes": 8216,
"src/handpose/handpipeline.js": {
"bytes": 8058,
"imports": [
{
"path": "src/handpose/box.js"
@@ -122,12 +101,26 @@
}
]
},
"src/handpose/handpose.js": {
"bytes": 2772,
"imports": [
{
"path": "src/handpose/handdetector.js"
},
{
"path": "src/handpose/handpipeline.js"
},
{
"path": "src/handpose/anchors.js"
}
]
},
"src/handpose/util.js": {
"bytes": 2488,
"bytes": 3030,
"imports": []
},
"src/human.js": {
"bytes": 15164,
"bytes": 15209,
"imports": [
{
"path": "src/facemesh/facemesh.js"
@@ -282,7 +275,7 @@
"dist/human.node-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 620872
"bytes": 622873
},
"dist/human.node-nobundle.js": {
"imports": [],
@@ -299,14 +292,14 @@
"src/facemesh/util.js": {
"bytesInOutput": 1171
},
"src/facemesh/pipeline.js": {
"bytesInOutput": 5571
"src/facemesh/facepipeline.js": {
"bytesInOutput": 5585
},
"src/facemesh/uvcoords.js": {
"bytesInOutput": 16785
},
"src/facemesh/triangulation.js": {
"bytesInOutput": 9990
"bytesInOutput": 9989
},
"src/facemesh/facemesh.js": {
"bytesInOutput": 1254
@@ -318,7 +311,7 @@
"bytesInOutput": 1236
},
"src/emotion/emotion.js": {
"bytesInOutput": 1104
"bytesInOutput": 1098
},
"src/posenet/modelBase.js": {
"bytesInOutput": 455
@@ -354,43 +347,40 @@
"bytesInOutput": 459
},
"src/handpose/box.js": {
"bytesInOutput": 1376
"bytesInOutput": 1419
},
"src/handpose/handdetector.js": {
"bytesInOutput": 2004
},
"src/handpose/keypoints.js": {
"bytesInOutput": 155
"bytesInOutput": 1891
},
"src/handpose/util.js": {
"bytesInOutput": 972
"bytesInOutput": 996
},
"src/handpose/pipeline.js": {
"bytesInOutput": 3223
"src/handpose/handpipeline.js": {
"bytesInOutput": 3233
},
"src/handpose/anchors.js": {
"bytesInOutput": 127000
},
"src/handpose/handpose.js": {
"bytesInOutput": 1014
"bytesInOutput": 1112
},
"src/imagefx.js": {
"bytesInOutput": 11013
},
"config.js": {
"bytesInOutput": 1272
"bytesInOutput": 1271
},
"package.json": {
"bytesInOutput": 2567
},
"src/human.js": {
"bytesInOutput": 29
"bytesInOutput": 28
},
"src/human.js": {
"bytesInOutput": 8439
"bytesInOutput": 8467
}
},
"bytes": 211622
"bytes": 212264
}
}
}
package-lock.json
@@ -524,7 +524,8 @@
"callsites": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
"integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="
"integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
"dev": true
},
"chalk": {
"version": "4.1.0",
@@ -1407,6 +1408,7 @@
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.2.tgz",
"integrity": "sha512-cTPNrlvJT6twpYy+YmKUKrTSjWFs3bjYjAhCwm+z4EOCubZxAuO+hHpRN64TqjEaYSHs7tJAE0w1CKMGmsG/lw==",
"dev": true,
"requires": {
"parent-module": "^1.0.0",
"resolve-from": "^4.0.0"
@@ -1570,8 +1572,7 @@
"jsonc-parser": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-2.3.1.tgz",
"integrity": "sha512-H8jvkz1O50L3dMZCsLqiuB2tA7muqbSg1AtGEkN0leAqGjsUzDJir3Zwr02BhqdcITPg3ei3mZ+HjMocAknhhg==",
"dev": true
"integrity": "sha512-H8jvkz1O50L3dMZCsLqiuB2tA7muqbSg1AtGEkN0leAqGjsUzDJir3Zwr02BhqdcITPg3ei3mZ+HjMocAknhhg=="
},
"levn": {
"version": "0.4.1",
@@ -1961,6 +1962,7 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
"integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
"dev": true,
"requires": {
"callsites": "^3.0.0"
}
@@ -2125,7 +2127,8 @@
"resolve-from": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
"integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="
"integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
"dev": true
},
"rimraf": {
"version": "3.0.2",
@@ -2498,10 +2501,9 @@
}
},
"vscode-json-languageservice": {
"version": "3.9.1",
"resolved": "https://registry.npmjs.org/vscode-json-languageservice/-/vscode-json-languageservice-3.9.1.tgz",
"integrity": "sha512-oJkknkdCVitQ5XPSRa0weHjUxt8eSCptaL+MBQQlRsa6Nb8XnEY0S5wYnLUFHzEvKzwt01/LKk8LdOixWEXkNA==",
"dev": true,
"version": "3.10.0",
"resolved": "https://registry.npmjs.org/vscode-json-languageservice/-/vscode-json-languageservice-3.10.0.tgz",
"integrity": "sha512-8IvuRSQnjznu+obqy6Dy4S4H68Ke7a3Kb+A0FcdctyAMAWEnrORpCpMOMqEYiPLm/OTYLVWJ7ql3qToDTozu4w==",
"requires": {
"jsonc-parser": "^2.3.1",
"vscode-languageserver-textdocument": "^1.0.1",
@@ -2513,26 +2515,22 @@
"vscode-languageserver-textdocument": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.1.tgz",
"integrity": "sha512-UIcJDjX7IFkck7cSkNNyzIz5FyvpQfY7sdzVy+wkKN/BLaD4DQ0ppXQrKePomCxTS7RrolK1I0pey0bG9eh8dA==",
"dev": true
"integrity": "sha512-UIcJDjX7IFkck7cSkNNyzIz5FyvpQfY7sdzVy+wkKN/BLaD4DQ0ppXQrKePomCxTS7RrolK1I0pey0bG9eh8dA=="
},
"vscode-languageserver-types": {
"version": "3.16.0-next.2",
"resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.16.0-next.2.tgz",
"integrity": "sha512-QjXB7CKIfFzKbiCJC4OWC8xUncLsxo19FzGVp/ADFvvi87PlmBSCAtZI5xwGjF5qE0xkLf0jjKUn3DzmpDP52Q==",
"dev": true
"integrity": "sha512-QjXB7CKIfFzKbiCJC4OWC8xUncLsxo19FzGVp/ADFvvi87PlmBSCAtZI5xwGjF5qE0xkLf0jjKUn3DzmpDP52Q=="
},
"vscode-nls": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/vscode-nls/-/vscode-nls-5.0.0.tgz",
"integrity": "sha512-u0Lw+IYlgbEJFF6/qAqG2d1jQmJl0eyAGJHoAJqr2HT4M2BNuQYSEiSE75f52pXHSJm8AlTjnLLbBFPrdz2hpA==",
"dev": true
"integrity": "sha512-u0Lw+IYlgbEJFF6/qAqG2d1jQmJl0eyAGJHoAJqr2HT4M2BNuQYSEiSE75f52pXHSJm8AlTjnLLbBFPrdz2hpA=="
},
"vscode-uri": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-2.1.2.tgz",
"integrity": "sha512-8TEXQxlldWAuIODdukIb+TR5s+9Ds40eSJrw+1iDDA9IFORPjMELarNQE3myz5XIkWWpdprmJjm1/SxMlWOC8A==",
"dev": true
"integrity": "sha512-8TEXQxlldWAuIODdukIb+TR5s+9Ds40eSJrw+1iDDA9IFORPjMELarNQE3myz5XIkWWpdprmJjm1/SxMlWOC8A=="
},
"which": {
"version": "2.0.2",
src/facemesh/facemesh.js
@@ -1,7 +1,7 @@
const tf = require('@tensorflow/tfjs');
const blazeface = require('./blazeface');
const keypoints = require('./keypoints');
const pipe = require('./pipeline');
const pipe = require('./facepipeline');
const uv_coords = require('./uvcoords');
const triangulation = require('./triangulation').default;
src/facemesh/facepipeline.js
@@ -134,7 +134,7 @@ class Pipeline {
    this.runsWithoutFaceDetector++;
    if (this.shouldUpdateRegionsOfInterest()) {
      const detector = await this.boundingBoxDetector.getBoundingBoxes(input);
      if (detector.boxes.length === 0) {
      if (!detector || !detector.boxes || (detector.boxes.length === 0)) {
        this.regionsOfInterest = [];
        return null;
      }
src/handpose/box.js
@@ -1,3 +1,19 @@
/**
 * @license
 * Copyright 2020 Google LLC. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * =============================================================================
 */
const tf = require('@tensorflow/tfjs');

function getBoxSize(box) {
@@ -6,35 +22,32 @@ function getBoxSize(box) {
    Math.abs(box.endPoint[1] - box.startPoint[1]),
  ];
}
exports.getBoxSize = getBoxSize;

function getBoxCenter(box) {
  return [
    box.startPoint[0] + (box.endPoint[0] - box.startPoint[0]) / 2,
    box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2,
  ];
}
exports.getBoxCenter = getBoxCenter;

function cutBoxFromImageAndResize(box, image, cropSize) {
  const h = image.shape[1];
  const w = image.shape[2];
  const boxes = [[
    box.startPoint[1] / h, box.startPoint[0] / w, box.endPoint[1] / h,
    box.startPoint[1] / h,
    box.startPoint[0] / w,
    box.endPoint[1] / h,
    box.endPoint[0] / w,
  ]];
  return tf.image.cropAndResize(image, boxes, [0], cropSize);
}
exports.cutBoxFromImageAndResize = cutBoxFromImageAndResize;

function scaleBoxCoordinates(box, factor) {
  const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];
  const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];
  const palmLandmarks = box.palmLandmarks.map((coord) => [coord[0] * factor[0], coord[1] * factor[1]]);
  const palmLandmarks = box.palmLandmarks.map((coord) => {
    const scaledCoord = [coord[0] * factor[0], coord[1] * factor[1]];
    return scaledCoord;
  });
  return { startPoint, endPoint, palmLandmarks };
}
exports.scaleBoxCoordinates = scaleBoxCoordinates;

function enlargeBox(box, factor = 1.5) {
  const center = getBoxCenter(box);
  const size = getBoxSize(box);
@@ -43,8 +56,6 @@ function enlargeBox(box, factor = 1.5) {
  const endPoint = [center[0] + newHalfSize[0], center[1] + newHalfSize[1]];
  return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };
}
exports.enlargeBox = enlargeBox;

function squarifyBox(box) {
  const centers = getBoxCenter(box);
  const size = getBoxSize(box);
@@ -54,15 +65,22 @@ function squarifyBox(box) {
  const endPoint = [centers[0] + halfSize, centers[1] + halfSize];
  return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };
}
exports.squarifyBox = squarifyBox;

function shiftBox(box, shiftFactor) {
  const boxSize = [
    box.endPoint[0] - box.startPoint[0], box.endPoint[1] - box.startPoint[1],
    box.endPoint[0] - box.startPoint[0],
    box.endPoint[1] - box.startPoint[1],
  ];
  const shiftVector = [boxSize[0] * shiftFactor[0], boxSize[1] * shiftFactor[1]];
  const startPoint = [box.startPoint[0] + shiftVector[0], box.startPoint[1] + shiftVector[1]];
  const endPoint = [box.endPoint[0] + shiftVector[0], box.endPoint[1] + shiftVector[1]];
  return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };
}
exports.shiftBox = shiftBox;
export {
  cutBoxFromImageAndResize,
  enlargeBox,
  getBoxCenter,
  getBoxSize,
  scaleBoxCoordinates,
  shiftBox,
  squarifyBox,
};
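A minimal sketch of how these box helpers compose, not part of the commit; the sample box values and landmark coordinates are purely illustrative:

const box = require('./box');

// hypothetical palm box, values chosen only for illustration
const palmBox = { startPoint: [100, 120], endPoint: [180, 200], palmLandmarks: [[110, 130], [150, 170]] };
const squared = box.squarifyBox(palmBox);        // equal width/height around the same center
const padded = box.enlargeBox(squared, 1.65);    // grow around the center by a factor
const shifted = box.shiftBox(padded, [0, -0.1]); // shift up by a fraction of the box size
console.log(box.getBoxSize(shifted), box.getBoxCenter(shifted));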
src/handpose/handdetector.js
@@ -1,15 +1,32 @@
/**
 * @license
 * Copyright 2020 Google LLC. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * =============================================================================
 */

const tf = require('@tensorflow/tfjs');
const bounding = require('./box');
const box = require('./box');

class HandDetector {
  constructor(model, anchors, config) {
  constructor(model, inputSize, anchorsAnnotated) {
    this.model = model;
    this.width = config.inputSize;
    this.height = config.inputSize;
    this.anchors = anchors.map((anchor) => [anchor.x_center, anchor.y_center]);
    this.width = inputSize;
    this.height = inputSize;
    this.anchors = anchorsAnnotated.map((anchor) => [anchor.x_center, anchor.y_center]);
    this.anchorsTensor = tf.tensor2d(this.anchors);
    this.inputSizeTensor = tf.tensor1d([config.inputSize, config.inputSize]);
    this.doubleInputSizeTensor = tf.tensor1d([config.inputSize * 2, config.inputSize * 2]);
    this.inputSizeTensor = tf.tensor1d([inputSize, inputSize]);
    this.doubleInputSizeTensor = tf.tensor1d([inputSize * 2, inputSize * 2]);
  }

  normalizeBoxes(boxes) {
@@ -31,59 +48,59 @@ class HandDetector {
    });
  }

  async getBoundingBoxes(input) {
    const batchedPrediction = this.model.predict(input);
  async getBoundingBoxes(input, config) {
    const normalizedInput = tf.tidy(() => tf.mul(tf.sub(input, 0.5), 2));
    const batchedPrediction = this.model.predict(normalizedInput);
    const prediction = batchedPrediction.squeeze();
    // Regression score for each anchor point.
    const scores = tf.tidy(() => tf.sigmoid(tf.slice(prediction, [0, 0], [-1, 1])).squeeze());
    // Bounding box for each anchor point.
    const rawBoxes = tf.slice(prediction, [0, 1], [-1, 4]);
    const boxes = this.normalizeBoxes(rawBoxes);
    const boxesWithHandsTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.maxHands, this.iouThreshold, this.scoreThreshold);
    const boxesWithHandsTensor = tf.image.nonMaxSuppression(boxes, scores, config.maxHands, config.iouThreshold, config.scoreThreshold);
    const boxesWithHands = boxesWithHandsTensor.arraySync();
    const detectedHands = tf.tidy(() => {
      const detectedBoxes = [];
      for (const i in boxesWithHands) {
        const boxIndex = boxesWithHands[i];
        const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);
        const rawPalmLandmarks = tf.slice(prediction, [boxIndex, 5], [1, 14]);
        const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));
        detectedBoxes.push({ boxes: matchingBox, palmLandmarks });
      }
      return detectedBoxes;
    });
    [batchedPrediction, boxesWithHandsTensor, prediction, boxes, rawBoxes, scores].forEach((tensor) => tensor.dispose());
    return detectedHands;
    const toDispose = [
      normalizedInput,
      batchedPrediction,
      boxesWithHandsTensor,
      prediction,
      boxes,
      rawBoxes,
      scores,
    ];
    if (boxesWithHands.length === 0) {
      toDispose.forEach((tensor) => tensor.dispose());
      return null;
    }
    const hands = [];
    for (const boxIndex of boxesWithHands) {
      const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);
      const rawPalmLandmarks = tf.slice(prediction, [boxIndex, 5], [1, 14]);
      const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));
      rawPalmLandmarks.dispose();
      hands.push({ boxes: matchingBox, palmLandmarks });
    }
    toDispose.forEach((tensor) => tensor.dispose());
    return hands;
  }

  /**
   * Returns a Box identifying the bounding box of a hand within the image.
   * Returns null if there is no hand in the image.
   *
   * @param input The image to classify.
   */
  async estimateHandBounds(input, config) {
    this.iouThreshold = config.iouThreshold;
    this.scoreThreshold = config.scoreThreshold;
    this.maxHands = config.maxHands;
    const resized = input.resizeBilinear([this.width, this.height]);
    const divided = resized.mul([1 / 127.5]);
    const image = divided.sub(0.5);
    resized.dispose();
    divided.dispose();
    const predictions = await this.getBoundingBoxes(image);
    image.dispose();
    if (!predictions || (predictions.length === 0)) return null;
    const inputHeight = input.shape[1];
    const inputWidth = input.shape[2];
    const image = tf.tidy(() => input.resizeBilinear([this.width, this.height]).div(255));
    const predictions = await this.getBoundingBoxes(image, config);
    if (!predictions || predictions.length === 0) {
      image.dispose();
      return null;
    }
    const hands = [];
    for (const i in predictions) {
      const prediction = predictions[i];
      const boundingBoxes = prediction.boxes.dataSync();
      const startPoint = [boundingBoxes[0], boundingBoxes[1]];
      const endPoint = [boundingBoxes[2], boundingBoxes[3]];
    for (const prediction of predictions) {
      const boundingBoxes = prediction.boxes.arraySync();
      const startPoint = boundingBoxes[0].slice(0, 2);
      const endPoint = boundingBoxes[0].slice(2, 4);
      const palmLandmarks = prediction.palmLandmarks.arraySync();
      image.dispose();
      prediction.boxes.dispose();
      prediction.palmLandmarks.dispose();
      hands.push(bounding.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks }, [input.shape[2] / this.width, input.shape[1] / this.height]));
      hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks }, [inputWidth / this.width, inputHeight / this.height]));
    }
    return hands;
  }
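A sketch of the detector's new calling convention, mirroring how load() in src/handpose/handpose.js (further below) wires it; handDetectorModel and input are assumed to already exist, and the config values are illustrative:

const handdetector = require('./handdetector');
const anchors = require('./anchors');

// handDetectorModel: a loaded tf.GraphModel; input: a [1, h, w, 3] image tensor (assumptions)
const detector = new handdetector.HandDetector(handDetectorModel, 256, anchors.anchors);
const bounds = await detector.estimateHandBounds(input, { maxHands: 1, iouThreshold: 0.3, scoreThreshold: 0.5 });
// bounds: null, or an array of { startPoint, endPoint, palmLandmarks } scaled back to the input image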
src/handpose/handpipeline.js (new file)
@@ -0,0 +1,184 @@
/**
 * @license
 * Copyright 2020 Google LLC. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * =============================================================================
 */

const tf = require('@tensorflow/tfjs');
const box = require('./box');
const util = require('./util');

const UPDATE_REGION_OF_INTEREST_IOU_THRESHOLD = 0.8;
const PALM_BOX_SHIFT_VECTOR = [0, -0.4];
const PALM_BOX_ENLARGE_FACTOR = 3;
const HAND_BOX_SHIFT_VECTOR = [0, -0.1];
const HAND_BOX_ENLARGE_FACTOR = 1.65;
const PALM_LANDMARK_IDS = [0, 5, 9, 13, 17, 1, 2];
const PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;
const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;

class HandPipeline {
  constructor(boundingBoxDetector, meshDetector, inputSize) {
    this.boundingBoxDetector = boundingBoxDetector;
    this.meshDetector = meshDetector;
    this.inputSize = inputSize;
    this.regionsOfInterest = [];
    this.runsWithoutHandDetector = 0;
    this.maxHandsNumber = 1;
    this.skipFrames = 0;
  }

  getBoxForPalmLandmarks(palmLandmarks, rotationMatrix) {
    const rotatedPalmLandmarks = palmLandmarks.map((coord) => {
      const homogeneousCoordinate = [...coord, 1];
      return util.rotatePoint(homogeneousCoordinate, rotationMatrix);
    });
    const boxAroundPalm = this.calculateLandmarksBoundingBox(rotatedPalmLandmarks);
    return box.enlargeBox(box.squarifyBox(box.shiftBox(boxAroundPalm, PALM_BOX_SHIFT_VECTOR)), PALM_BOX_ENLARGE_FACTOR);
  }

  getBoxForHandLandmarks(landmarks) {
    const boundingBox = this.calculateLandmarksBoundingBox(landmarks);
    const boxAroundHand = box.enlargeBox(box.squarifyBox(box.shiftBox(boundingBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);
    const palmLandmarks = [];
    for (let i = 0; i < PALM_LANDMARK_IDS.length; i++) {
      palmLandmarks.push(landmarks[PALM_LANDMARK_IDS[i]].slice(0, 2));
    }
    boxAroundHand.palmLandmarks = palmLandmarks;
    return boxAroundHand;
  }

  transformRawCoords(rawCoords, box2, angle, rotationMatrix) {
    const boxSize = box.getBoxSize(box2);
    const scaleFactor = [boxSize[0] / this.inputSize, boxSize[1] / this.inputSize];
    const coordsScaled = rawCoords.map((coord) => [
      scaleFactor[0] * (coord[0] - this.inputSize / 2),
      scaleFactor[1] * (coord[1] - this.inputSize / 2),
      coord[2],
    ]);
    const coordsRotationMatrix = util.buildRotationMatrix(angle, [0, 0]);
    const coordsRotated = coordsScaled.map((coord) => {
      const rotated = util.rotatePoint(coord, coordsRotationMatrix);
      return [...rotated, coord[2]];
    });
    const inverseRotationMatrix = util.invertTransformMatrix(rotationMatrix);
    const boxCenter = [...box.getBoxCenter(box2), 1];
    const originalBoxCenter = [
      util.dot(boxCenter, inverseRotationMatrix[0]),
      util.dot(boxCenter, inverseRotationMatrix[1]),
    ];
    return coordsRotated.map((coord) => [
      coord[0] + originalBoxCenter[0],
      coord[1] + originalBoxCenter[1],
      coord[2],
    ]);
  }

  async estimateHands(image, config) {
    this.skipFrames = config.skipFrames;
    const useFreshBox = this.shouldUpdateRegionsOfInterest();
    if (useFreshBox) {
      const boundingBoxPredictions = await this.boundingBoxDetector.estimateHandBounds(image, config);
      this.regionsOfInterest = [];
      if (!boundingBoxPredictions || boundingBoxPredictions.length === 0) {
        image.dispose();
        return null;
      }
      for (const boundingBoxPrediction of boundingBoxPredictions) {
        this.regionsOfInterest.push(boundingBoxPrediction);
      }
      this.runsWithoutHandDetector = 0;
    } else {
      this.runsWithoutHandDetector++;
    }
    const hands = [];
    for (const i in this.regionsOfInterest) {
      const currentBox = this.regionsOfInterest[i];
      if (!currentBox) continue;
      const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
      const palmCenter = box.getBoxCenter(currentBox);
      const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];
      const rotatedImage = tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized);
      const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);
      const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
      const croppedInput = box.cutBoxFromImageAndResize(newBox, rotatedImage, [this.inputSize, this.inputSize]);
      const handImage = croppedInput.div(255);
      croppedInput.dispose();
      rotatedImage.dispose();
      const prediction = this.meshDetector.predict(handImage);
      const [confidence, keypoints] = prediction;
      handImage.dispose();
      const confidenceValue = confidence.dataSync()[0];
      confidence.dispose();
      if (confidenceValue < config.minConfidence) {
        keypoints.dispose();
        this.regionsOfInterest[i] = null;
        return null;
      }
      const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
      const rawCoords = keypointsReshaped.arraySync();
      keypoints.dispose();
      keypointsReshaped.dispose();
      const coords = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
      const nextBoundingBox = this.getBoxForHandLandmarks(coords);
      this.updateRegionsOfInterest(nextBoundingBox, i);
      const result = {
        landmarks: coords,
        handInViewConfidence: confidenceValue,
        boundingBox: {
          topLeft: nextBoundingBox.startPoint,
          bottomRight: nextBoundingBox.endPoint,
        },
      };
      hands.push(result);
    }
    return hands;
  }

  // eslint-disable-next-line class-methods-use-this
  calculateLandmarksBoundingBox(landmarks) {
    const xs = landmarks.map((d) => d[0]);
    const ys = landmarks.map((d) => d[1]);
    const startPoint = [Math.min(...xs), Math.min(...ys)];
    const endPoint = [Math.max(...xs), Math.max(...ys)];
    return { startPoint, endPoint };
  }

  updateRegionsOfInterest(newBox, i) {
    const previousBox = this.regionsOfInterest[i];
    let iou = 0;
    if (previousBox != null && previousBox.startPoint != null) {
      const [boxStartX, boxStartY] = newBox.startPoint;
      const [boxEndX, boxEndY] = newBox.endPoint;
      const [previousBoxStartX, previousBoxStartY] = previousBox.startPoint;
      const [previousBoxEndX, previousBoxEndY] = previousBox.endPoint;
      const xStartMax = Math.max(boxStartX, previousBoxStartX);
      const yStartMax = Math.max(boxStartY, previousBoxStartY);
      const xEndMin = Math.min(boxEndX, previousBoxEndX);
      const yEndMin = Math.min(boxEndY, previousBoxEndY);
      const intersection = (xEndMin - xStartMax) * (yEndMin - yStartMax);
      const boxArea = (boxEndX - boxStartX) * (boxEndY - boxStartY);
      const previousBoxArea = (previousBoxEndX - previousBoxStartX) * (previousBoxEndY - boxStartY);
      iou = intersection / (boxArea + previousBoxArea - intersection);
    }
    this.regionsOfInterest[i] = iou > UPDATE_REGION_OF_INTEREST_IOU_THRESHOLD ? previousBox : newBox;
  }

  shouldUpdateRegionsOfInterest() {
    console.log(this.regionsOfInterest.length, this.runsWithoutHandDetector, !this.regionsOfInterest || (this.regionsOfInterest.length === 0) || (this.runsWithoutHandDetector >= this.skipFrames));
    return !this.regionsOfInterest || (this.regionsOfInterest.length === 0) || (this.runsWithoutHandDetector >= this.skipFrames);
  }
}

exports.HandPipeline = HandPipeline;
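How the new pipeline is assembled, a sketch consistent with load() in handpose.js below; detector, handPoseModel, and image are assumed to already exist, and inputSize 256 is illustrative:

const pipeline = require('./handpipeline');

const pipe = new pipeline.HandPipeline(detector, handPoseModel, 256);
// the palm detector runs only when the cached regions of interest are stale
// (empty, or skipFrames frames have passed since the last detector run)
const predictions = await pipe.estimateHands(image, { skipFrames: 10, minConfidence: 0.5, maxHands: 1, iouThreshold: 0.3, scoreThreshold: 0.5 });
// predictions: null, or [{ landmarks, handInViewConfidence, boundingBox: { topLeft, bottomRight } }]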
src/handpose/handpose.js
@@ -1,30 +1,54 @@
/**
 * @license
 * Copyright 2020 Google LLC. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * =============================================================================
 */
const tf = require('@tensorflow/tfjs');
const hand = require('./handdetector');
const keypoints = require('./keypoints');
const pipe = require('./pipeline');
const anchors = require('./anchors.js');
const handdetector = require('./handdetector');
const pipeline = require('./handpipeline');
const anchors = require('./anchors');

const MESH_ANNOTATIONS = {
  thumb: [1, 2, 3, 4],
  indexFinger: [5, 6, 7, 8],
  middleFinger: [9, 10, 11, 12],
  ringFinger: [13, 14, 15, 16],
  pinky: [17, 18, 19, 20],
  palmBase: [0],
};

class HandPose {
  constructor(pipeline) {
    this.pipeline = pipeline;
  constructor(pipe) {
    this.pipeline = pipe;
  }

  static getAnnotations() {
    return MESH_ANNOTATIONS;
  }

  async estimateHands(input, config) {
    this.skipFrames = config.skipFrames;
    this.detectionConfidence = config.minConfidence;
    this.maxHands = config.maxHands;
    const predictions = await this.pipeline.estimateHands(input, config);
    if (!predictions) return [];
    const hands = [];
    if (!predictions) return hands;
    for (const prediction of predictions) {
      if (!prediction) return [];
      const annotations = {};
      for (const key of Object.keys(keypoints.MESH_ANNOTATIONS)) {
        annotations[key] = keypoints.MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);
      for (const key of Object.keys(MESH_ANNOTATIONS)) {
        annotations[key] = MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);
      }
      hands.push({
        confidence: prediction.confidence || 0,
        box: prediction.box ? [prediction.box.topLeft[0], prediction.box.topLeft[1], prediction.box.bottomRight[0] - prediction.box.topLeft[0], prediction.box.bottomRight[1] - prediction.box.topLeft[1]] : 0,
        confidence: prediction.handInViewConfidence,
        box: prediction.boundingBox ? [prediction.boundingBox.topLeft[0], prediction.boundingBox.topLeft[1], prediction.boundingBox.bottomRight[0] - prediction.boundingBox.topLeft[0], prediction.boundingBox.bottomRight[1] - prediction.boundingBox.topLeft[1]] : 0,
        landmarks: prediction.landmarks,
        annotations,
      });
@@ -35,13 +59,14 @@ class HandPose {
exports.HandPose = HandPose;

async function load(config) {
  // maxContinuousChecks = Infinity, detectionConfidence = 0.8, iouThreshold = 0.3, scoreThreshold = 0.5
  const [handDetectorModel, handPoseModel] = await Promise.all([
    tf.loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') }),
    tf.loadGraphModel(config.skeleton.modelPath, { fromTFHub: config.skeleton.modelPath.includes('tfhub.dev') }),
  ]);
  const detector = new hand.HandDetector(handDetectorModel, anchors.anchors, config);
  const pipeline = new pipe.HandPipeline(detector, handPoseModel, config);
  const handpose = new HandPose(pipeline);
  const detector = new handdetector.HandDetector(handDetectorModel, config.inputSize, anchors.anchors);
  const pipe = new pipeline.HandPipeline(detector, handPoseModel, config.inputSize);
  const handpose = new HandPose(pipe);
  return handpose;
}
exports.load = load;
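End to end, a usage sketch of the refactored module; the config shape follows the fields this diff reads (inputSize, maxHands, skipFrames, minConfidence, iouThreshold, scoreThreshold, detector.modelPath, skeleton.modelPath), while the concrete values and model paths are assumptions:

const handpose = require('./handpose');

const config = {
  inputSize: 256,
  maxHands: 1,
  skipFrames: 10,
  minConfidence: 0.5,
  iouThreshold: 0.3,
  scoreThreshold: 0.5,
  detector: { modelPath: 'file://models/handdetect/model.json' },   // assumed path
  skeleton: { modelPath: 'file://models/handskeleton/model.json' }, // assumed path
};

const model = await handpose.load(config);
const hands = await model.estimateHands(inputTensor, config); // inputTensor: a [1, h, w, 3] image tensor (assumption)
// each entry: { confidence, box: [x, y, width, height], landmarks, annotations }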
src/handpose/keypoints.js (deleted)
@@ -1,8 +0,0 @@
exports.MESH_ANNOTATIONS = {
  thumb: [1, 2, 3, 4],
  indexFinger: [5, 6, 7, 8],
  middleFinger: [9, 10, 11, 12],
  ringFinger: [13, 14, 15, 16],
  pinky: [17, 18, 19, 20],
  palmBase: [0],
};
@ -1,177 +0,0 @@
|
|||
const tf = require('@tensorflow/tfjs');
|
||||
const bounding = require('./box');
|
||||
const util = require('./util');
|
||||
|
||||
const UPDATE_REGION_OF_INTEREST_IOU_THRESHOLD = 0.8;
|
||||
const PALM_BOX_SHIFT_VECTOR = [0, -0.4];
|
||||
const HAND_BOX_SHIFT_VECTOR = [0, -0.1];
|
||||
const HAND_BOX_ENLARGE_FACTOR = 1.65;
|
||||
const PALM_LANDMARK_IDS = [0, 5, 9, 13, 17, 1, 2];
|
||||
const PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;
|
||||
const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;
|
||||
|
||||
// The Pipeline coordinates between the bounding box and skeleton models.
|
||||
class HandPipeline {
|
||||
constructor(boundingBoxDetector, detector, config) {
|
||||
this.regionsOfInterest = [];
|
||||
this.runsWithoutHandDetector = 0;
|
||||
this.boundingBoxDetector = boundingBoxDetector;
|
||||
this.detector = detector;
|
||||
this.meshWidth = config.inputSize;
|
||||
this.meshHeight = config.inputSize;
|
||||
this.enlargeFactor = config.enlargeFactor;
|
||||
}
|
||||
|
||||
// Get the bounding box surrounding the hand, given palm landmarks.
|
||||
getBoxForPalmLandmarks(palmLandmarks, rotationMatrix) {
|
||||
const rotatedPalmLandmarks = palmLandmarks.map((coord) => {
|
||||
const homogeneousCoordinate = [...coord, 1];
|
||||
return util.rotatePoint(homogeneousCoordinate, rotationMatrix);
|
||||
});
|
||||
const boxAroundPalm = this.calculateLandmarksBoundingBox(rotatedPalmLandmarks);
|
||||
// boxAroundPalm only surrounds the palm - therefore we shift it
|
||||
// upwards so it will capture fingers once enlarged + squarified.
|
||||
return bounding.enlargeBox(bounding.squarifyBox(bounding.shiftBox(boxAroundPalm, PALM_BOX_SHIFT_VECTOR)), this.enlargeFactor);
|
||||
}
|
||||
|
||||
// Get the bounding box surrounding the hand, given all hand landmarks.
|
||||
getBoxForHandLandmarks(landmarks) {
|
||||
// The MediaPipe hand mesh model is trained on hands with empty space
|
||||
// around them, so we still need to shift / enlarge boxAroundHand even
|
||||
// though it surrounds the entire hand.
|
||||
const boundingBox = this.calculateLandmarksBoundingBox(landmarks);
|
||||
const boxAroundHand = bounding.enlargeBox(bounding.squarifyBox(bounding.shiftBox(boundingBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);
|
||||
const palmLandmarks = [];
|
||||
for (let i = 0; i < PALM_LANDMARK_IDS.length; i++) {
|
||||
palmLandmarks.push(landmarks[PALM_LANDMARK_IDS[i]].slice(0, 2));
|
||||
}
|
||||
boxAroundHand.palmLandmarks = palmLandmarks;
|
||||
return boxAroundHand;
|
||||
}
|
||||
|
||||
// Scale, rotate, and translate raw keypoints from the model so they map to
|
||||
// the input coordinates.
|
||||
transformRawCoords(rawCoords, box, angle, rotationMatrix) {
|
||||
const boxSize = bounding.getBoxSize(box);
|
||||
const scaleFactor = [boxSize[0] / this.meshWidth, boxSize[1] / this.meshHeight];
|
||||
const coordsScaled = rawCoords.map((coord) => [
|
||||
scaleFactor[0] * (coord[0] - this.meshWidth / 2),
|
||||
scaleFactor[1] * (coord[1] - this.meshHeight / 2), coord[2],
|
||||
]);
|
||||
const coordsRotationMatrix = util.buildRotationMatrix(angle, [0, 0]);
|
||||
const coordsRotated = coordsScaled.map((coord) => {
|
||||
const rotated = util.rotatePoint(coord, coordsRotationMatrix);
|
||||
return [...rotated, coord[2]];
|
||||
});
|
||||
const inverseRotationMatrix = util.invertTransformMatrix(rotationMatrix);
|
||||
const boxCenter = [...bounding.getBoxCenter(box), 1];
|
||||
const originalBoxCenter = [
|
||||
util.dot(boxCenter, inverseRotationMatrix[0]),
|
||||
util.dot(boxCenter, inverseRotationMatrix[1]),
|
||||
];
|
||||
return coordsRotated.map((coord) => [
|
||||
coord[0] + originalBoxCenter[0], coord[1] + originalBoxCenter[1],
|
||||
coord[2],
|
||||
]);
|
||||
}
|
||||
|
||||
async estimateHands(image, config) {
|
||||
this.skipFrames = config.skipFrames;
|
||||
this.detectionConfidence = config.minConfidence;
|
||||
this.maxHands = config.maxHands;
|
||||
this.runsWithoutHandDetector++;
|
||||
const useFreshBox = this.shouldUpdateRegionsOfInterest();
|
||||
if (useFreshBox === true) {
|
||||
const boundingBoxPredictions = await this.boundingBoxDetector.estimateHandBounds(image, config);
|
||||
this.regionsOfInterest = [];
|
||||
for (const i in boundingBoxPredictions) {
|
||||
this.updateRegionsOfInterest(boundingBoxPredictions[i], true /* force update */, i);
|
||||
}
|
||||
this.runsWithoutHandDetector = 0;
|
||||
}
|
||||
// Rotate input so the hand is vertically oriented.
|
||||
const hands = [];
|
||||
if (!this.regionsOfInterest) return hands;
|
||||
for (const i in this.regionsOfInterest) {
|
||||
const currentBox = this.regionsOfInterest[i] ? this.regionsOfInterest[i][0] : null;
|
||||
if (!currentBox) return hands;
|
||||
const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
|
||||
const palmCenter = bounding.getBoxCenter(currentBox);
|
||||
const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];
|
||||
const rotatedImage = tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized);
|
||||
const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);
|
||||
const box = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
|
||||
const croppedInput = bounding.cutBoxFromImageAndResize(box, rotatedImage, [this.meshWidth, this.meshHeight]);
|
||||
const handImage = croppedInput.div(255);
|
||||
croppedInput.dispose();
|
||||
rotatedImage.dispose();
|
||||
const prediction = this.detector.predict(handImage);
|
||||
const [confidence, keypoints] = prediction;
|
||||
handImage.dispose();
|
||||
const confidenceVal = confidence.dataSync()[0];
|
||||
confidence.dispose();
|
||||
if (confidenceVal < config.minConfidence) {
|
||||
keypoints.dispose();
|
||||
this.regionsOfInterest[i] = [];
|
||||
return hands;
|
||||
}
|
||||
const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
|
||||
const rawCoords = keypointsReshaped.arraySync();
|
||||
keypoints.dispose();
|
||||
keypointsReshaped.dispose();
|
||||
const coords = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
|
||||
const nextBoundingBox = this.getBoxForHandLandmarks(coords);
|
||||
this.updateRegionsOfInterest(nextBoundingBox, false /* force replace */, i);
|
||||
const result = {
|
||||
landmarks: coords,
|
||||
confidence: confidenceVal,
|
||||
box: {
|
||||
topLeft: nextBoundingBox.startPoint,
|
||||
bottomRight: nextBoundingBox.endPoint,
|
||||
},
|
||||
};
|
||||
hands.push(result);
|
||||
}
|
||||
return hands;
|
||||
}
|
||||
|
||||
  // eslint-disable-next-line class-methods-use-this
  calculateLandmarksBoundingBox(landmarks) {
    const xs = landmarks.map((d) => d[0]);
    const ys = landmarks.map((d) => d[1]);
    const startPoint = [Math.min(...xs), Math.min(...ys)];
    const endPoint = [Math.max(...xs), Math.max(...ys)];
    return { startPoint, endPoint };
  }

  // Updates regions of interest if the intersection over union between
  // the incoming and previous regions falls below a threshold.
  updateRegionsOfInterest(box, forceUpdate, index) {
    if (forceUpdate) {
      this.regionsOfInterest[index] = [box];
    } else {
      const previousBox = this.regionsOfInterest[index][0];
      let iou = 0;
      if (previousBox != null && previousBox.startPoint != null) {
        const [boxStartX, boxStartY] = box.startPoint;
        const [boxEndX, boxEndY] = box.endPoint;
        const [previousBoxStartX, previousBoxStartY] = previousBox.startPoint;
        const [previousBoxEndX, previousBoxEndY] = previousBox.endPoint;
        const xStartMax = Math.max(boxStartX, previousBoxStartX);
        const yStartMax = Math.max(boxStartY, previousBoxStartY);
        const xEndMin = Math.min(boxEndX, previousBoxEndX);
        const yEndMin = Math.min(boxEndY, previousBoxEndY);
        const intersection = (xEndMin - xStartMax) * (yEndMin - yStartMax);
        const boxArea = (boxEndX - boxStartX) * (boxEndY - boxStartY);
        // note: the previous-box area must use the previous box's own start point
        const previousBoxArea = (previousBoxEndX - previousBoxStartX) * (previousBoxEndY - previousBoxStartY);
        iou = intersection / (boxArea + previousBoxArea - intersection);
      }
      this.regionsOfInterest[index][0] = iou > UPDATE_REGION_OF_INTEREST_IOU_THRESHOLD ? previousBox : box;
    }
  }

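  // Worked IoU example (illustrative numbers): box [0,0]-[10,10] against previous box
  // [5,5]-[15,15] gives intersection (10-5)*(10-5) = 25 and union 100 + 100 - 25 = 175,
  // so iou ≈ 0.14; below the threshold, the incoming box replaces the cached one.
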
  shouldUpdateRegionsOfInterest() {
    return !this.regionsOfInterest || (this.regionsOfInterest.length === 0) || (this.runsWithoutHandDetector >= this.skipFrames);
  }
}

exports.HandPipeline = HandPipeline;

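// Hedged integration sketch (module path and constructor signature are assumptions, not
// confirmed by this diff): the palm detector re-runs only when no region is cached or
// when skipFrames frames have elapsed; otherwise the cached palm boxes are tracked.
//   const { HandPipeline } = require('./handpipeline');
//   const pipeline = new HandPipeline(boundingBoxDetector, landmarkDetector, 256, 256);
//   const hands = await pipeline.estimateHands(inputTensor, { skipFrames: 10, minConfidence: 0.5, maxHands: 2 });
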
src/handpose/util.js

@@ -1,15 +1,27 @@
/**
 * @license
 * Copyright 2020 Google LLC. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * =============================================================================
 */
function normalizeRadians(angle) {
  return angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));
}
exports.normalizeRadians = normalizeRadians;

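// Worked example: normalizeRadians folds any angle into [-PI, PI):
//   normalizeRadians(Math.PI / 2) ->  Math.PI / 2   (already in range)
//   normalizeRadians(3 * Math.PI) -> -Math.PI       (3π + π = 4π; floor(4π / 2π) = 2; 3π - 2·2π = -π)
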
function computeRotation(point1, point2) {
  const radians = Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]);
  return normalizeRadians(radians);
}
exports.computeRotation = computeRotation;

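// Worked example (image coordinates, y growing downward; point1 = palm base, point2 =
// middle-finger base, matching the call in the hand pipeline):
//   computeRotation([0, 0], [0, -1]) -> 0           (hand already upright, no rotation needed)
//   computeRotation([0, 0], [1, 0])  -> Math.PI / 2 (hand pointing right, quarter-turn correction)
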
const buildTranslationMatrix = (x, y) => [[1, 0, x], [0, 1, y], [0, 0, 1]];

function dot(v1, v2) {
  let product = 0;
  for (let i = 0; i < v1.length; i++) {
@@ -17,8 +29,6 @@ function dot(v1, v2) {
  }
  return product;
}
exports.dot = dot;

function getColumnFrom2DArr(arr, columnIndex) {
  const column = [];
  for (let i = 0; i < arr.length; i++) {
@@ -26,8 +36,6 @@ function getColumnFrom2DArr(arr, columnIndex) {
  }
  return column;
}
exports.getColumnFrom2DArr = getColumnFrom2DArr;

function multiplyTransformMatrices(mat1, mat2) {
  const product = [];
  const size = mat1.length;
@@ -48,8 +56,6 @@ function buildRotationMatrix(rotation, center) {
  const negativeTranslationMatrix = buildTranslationMatrix(-center[0], -center[1]);
  return multiplyTransformMatrices(translationTimesRotation, negativeTranslationMatrix);
}
exports.buildRotationMatrix = buildRotationMatrix;

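// buildRotationMatrix composes T(center) · R(rotation) · T(-center), i.e. rotation about
// an arbitrary point. Worked example, assuming the usual counter-clockwise convention
// for the rotation block (the top of the function is collapsed in this diff):
//   rotating [2, 1] by PI/2 about center [1, 1]:
//   translate by [-1, -1] -> [1, 0]; rotate 90° -> [0, 1]; translate back -> [1, 2]
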
function invertTransformMatrix(matrix) {
  const rotationComponent = [[matrix[0][0], matrix[1][0]], [matrix[0][1], matrix[1][1]]];
  const translationComponent = [matrix[0][2], matrix[1][2]];
@@ -63,12 +69,18 @@ function invertTransformMatrix(matrix) {
    [0, 0, 1],
  ];
}
exports.invertTransformMatrix = invertTransformMatrix;

function rotatePoint(homogeneousCoordinate, rotationMatrix) {
  return [
    dot(homogeneousCoordinate, rotationMatrix[0]),
    dot(homogeneousCoordinate, rotationMatrix[1]),
  ];
}
exports.rotatePoint = rotatePoint;

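// Quick sanity check (illustrative; assumes a counter-clockwise rotation block, which
// the collapsed portion of buildRotationMatrix is expected to provide):
//   const m = buildRotationMatrix(Math.PI / 2, [0, 0]);
//   rotatePoint([1, 0, 1], m); // ≈ [0, 1], up to floating-point error
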
export {
  buildRotationMatrix,
  computeRotation,
  dot,
  getColumnFrom2DArr,
  invertTransformMatrix,
  normalizeRadians,
  rotatePoint,
};

src/human.js

@@ -167,6 +167,7 @@ class Human {
        this.log('Changing WebGL: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);
        tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 0 : -1);
      }
      tf.ENV.set('WEBGL_CPU_FORWARD', true);
      await tf.ready();
    }
  }

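      // Illustrative note (not from this diff): the same flag can be toggled directly,
      //   tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', 0);
      // which makes the WebGL backend release GPU textures as soon as they are unused,
      // trading speed for a smaller GPU memory footprint; -1 keeps the default caching.
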
@@ -284,32 +285,6 @@ class Human {
      perf.image = Math.trunc(now() - timeStamp);
      const imageTensor = image.tensor;

      // run posenet
      if (this.config.async) {
        poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
      } else {
        this.state = 'run:body';
        timeStamp = now();
        this.analyze('Start PoseNet');
        poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
        this.analyze('End PoseNet:');
        perf.body = Math.trunc(now() - timeStamp);
      }

      // run handpose
      if (this.config.async) {
        handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
      } else {
        this.state = 'run:hand';
        timeStamp = now();
        this.analyze('Start HandPose:');
        handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
        this.analyze('End HandPose:');
        perf.hand = Math.trunc(now() - timeStamp);
      }

      if (this.config.async) [poseRes, handRes] = await Promise.all([poseRes, handRes]);

      // run facemesh, includes blazeface and iris
      const faceRes = [];
      if (this.config.face.enabled) {
@@ -357,6 +332,32 @@ class Human {
        }
      }

      // run posenet
      if (this.config.async) {
        poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
      } else {
        this.state = 'run:body';
        timeStamp = now();
        this.analyze('Start PoseNet');
        poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
        this.analyze('End PoseNet:');
        perf.body = Math.trunc(now() - timeStamp);
      }

      // run handpose
      if (this.config.async) {
        handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
      } else {
        this.state = 'run:hand';
        timeStamp = now();
        this.analyze('Start HandPose:');
        handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
        this.analyze('End HandPose:');
        perf.hand = Math.trunc(now() - timeStamp);
      }

      if (this.config.async) [poseRes, handRes] = await Promise.all([poseRes, handRes]);

      imageTensor.dispose();
      this.state = 'idle';
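      // Hedged sketch of the scheduling pattern above (names are illustrative): in async
      // mode the model calls are started without await so they can overlap, and a single
      // Promise.all gathers both results:
      //   const poseP = posenet.estimatePoses(tensor, cfg);   // not awaited yet
      //   const handP = handpose.estimateHands(tensor, cfg);  // starts concurrently
      //   const [poses, hands] = await Promise.all([poseP, handP]);
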