updated typings

pull/70/head
Vladimir Mandic 2021-02-08 12:47:38 -05:00
parent 6fb133b55e
commit 7fd677681c
76 changed files with 1188 additions and 716 deletions

View File

@ -1,4 +1,3 @@
// @ts-nocheck
/* eslint-disable max-len */
// based on: https://github.com/munrocket/gl-bench

View File

@ -1,5 +1,3 @@
// @ts-nocheck
let instance = 0;
let CSScreated = false;

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,7 +1,7 @@
{
"inputs": {
"dist/human.esm.js": {
"bytes": 1343005,
"bytes": 1342808,
"imports": []
},
"demo/draw.js": {
@ -9,11 +9,11 @@
"imports": []
},
"demo/menu.js": {
"bytes": 13874,
"bytes": 13858,
"imports": []
},
"demo/gl-bench.js": {
"bytes": 10797,
"bytes": 10782,
"imports": []
},
"demo/browser.js": {
@ -43,14 +43,14 @@
"imports": [],
"exports": [],
"inputs": {},
"bytes": 2018414
"bytes": 2018735
},
"dist/demo-browser-index.js": {
"imports": [],
"exports": [],
"inputs": {
"dist/human.esm.js": {
"bytesInOutput": 1335513
"bytesInOutput": 1335316
},
"demo/draw.js": {
"bytesInOutput": 6204
@ -65,7 +65,7 @@
"bytesInOutput": 16815
}
},
"bytes": 1383372
"bytes": 1383175
}
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

190
dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

56
dist/human.esm.json vendored
View File

@ -9,7 +9,7 @@
"imports": []
},
"src/tfjs/backend.ts": {
"bytes": 2168,
"bytes": 2228,
"imports": [
{
"path": "src/log.ts",
@ -22,7 +22,7 @@
]
},
"src/blazeface/blazeface.ts": {
"bytes": 6024,
"bytes": 6119,
"imports": [
{
"path": "src/log.ts",
@ -44,7 +44,7 @@
]
},
"src/blazeface/util.ts": {
"bytes": 2777,
"bytes": 2809,
"imports": []
},
"src/blazeface/coords.ts": {
@ -52,7 +52,7 @@
"imports": []
},
"src/blazeface/facepipeline.ts": {
"bytes": 14254,
"bytes": 14251,
"imports": [
{
"path": "dist/tfjs.esm.js",
@ -73,7 +73,7 @@
]
},
"src/blazeface/facemesh.ts": {
"bytes": 2935,
"bytes": 2899,
"imports": [
{
"path": "src/log.ts",
@ -107,7 +107,7 @@
]
},
"src/faceboxes/faceboxes.ts": {
"bytes": 2852,
"bytes": 2846,
"imports": [
{
"path": "src/log.ts",
@ -124,7 +124,7 @@
]
},
"src/age/age.ts": {
"bytes": 2064,
"bytes": 1949,
"imports": [
{
"path": "src/log.ts",
@ -141,7 +141,7 @@
]
},
"src/gender/gender.ts": {
"bytes": 2904,
"bytes": 2811,
"imports": [
{
"path": "src/log.ts",
@ -158,7 +158,7 @@
]
},
"src/emotion/emotion.ts": {
"bytes": 3033,
"bytes": 3042,
"imports": [
{
"path": "src/log.ts",
@ -175,7 +175,7 @@
]
},
"src/embedding/embedding.ts": {
"bytes": 1992,
"bytes": 1912,
"imports": [
{
"path": "src/log.ts",
@ -227,7 +227,7 @@
]
},
"src/posenet/decoders.ts": {
"bytes": 1943,
"bytes": 1958,
"imports": [
{
"path": "dist/tfjs.esm.js",
@ -257,7 +257,7 @@
]
},
"src/posenet/decodeMultiple.ts": {
"bytes": 2259,
"bytes": 2301,
"imports": [
{
"path": "src/posenet/buildParts.ts",
@ -321,7 +321,7 @@
]
},
"src/handpose/handdetector.ts": {
"bytes": 3627,
"bytes": 3701,
"imports": [
{
"path": "dist/tfjs.esm.js",
@ -334,11 +334,11 @@
]
},
"src/handpose/util.ts": {
"bytes": 2254,
"bytes": 2286,
"imports": []
},
"src/handpose/handpipeline.ts": {
"bytes": 7344,
"bytes": 7279,
"imports": [
{
"path": "dist/tfjs.esm.js",
@ -359,7 +359,7 @@
"imports": []
},
"src/handpose/handpose.ts": {
"bytes": 2529,
"bytes": 2565,
"imports": [
{
"path": "src/log.ts",
@ -384,7 +384,7 @@
]
},
"src/gesture/gesture.ts": {
"bytes": 4265,
"bytes": 4497,
"imports": []
},
"src/imagefx.js": {
@ -392,7 +392,7 @@
"imports": []
},
"src/image.ts": {
"bytes": 5851,
"bytes": 5867,
"imports": [
{
"path": "src/log.ts",
@ -417,11 +417,11 @@
"imports": []
},
"package.json": {
"bytes": 2344,
"bytes": 2385,
"imports": []
},
"src/human.ts": {
"bytes": 19393,
"bytes": 19619,
"imports": [
{
"path": "src/log.ts",
@ -499,7 +499,7 @@
"imports": [],
"exports": [],
"inputs": {},
"bytes": 1923046
"bytes": 1923329
},
"dist/human.esm.js": {
"imports": [],
@ -538,10 +538,10 @@
"bytesInOutput": 28983
},
"src/blazeface/facepipeline.ts": {
"bytesInOutput": 5046
"bytesInOutput": 5040
},
"src/human.ts": {
"bytesInOutput": 10223
"bytesInOutput": 10295
},
"src/faceboxes/faceboxes.ts": {
"bytesInOutput": 1549
@ -550,16 +550,16 @@
"bytesInOutput": 606
},
"src/age/age.ts": {
"bytesInOutput": 826
"bytesInOutput": 775
},
"src/gender/gender.ts": {
"bytesInOutput": 1309
"bytesInOutput": 1237
},
"src/emotion/emotion.ts": {
"bytesInOutput": 1243
"bytesInOutput": 1180
},
"src/embedding/embedding.ts": {
"bytesInOutput": 802
"bytesInOutput": 725
},
"src/posenet/posenet.ts": {
"bytesInOutput": 1007
@ -622,7 +622,7 @@
"bytesInOutput": 16
}
},
"bytes": 1343005
"bytes": 1342808
}
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

18
dist/human.node.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

322
dist/human.node.json vendored
View File

@ -9,7 +9,7 @@
"imports": []
},
"src/tfjs/backend.ts": {
"bytes": 2168,
"bytes": 2228,
"imports": [
{
"path": "src/log.ts",
@ -21,8 +21,8 @@
}
]
},
"src/blazeface/blazeface.js": {
"bytes": 6983,
"src/blazeface/blazeface.ts": {
"bytes": 6119,
"imports": [
{
"path": "src/log.ts",
@ -34,8 +34,8 @@
}
]
},
"src/blazeface/box.js": {
"bytes": 1935,
"src/blazeface/box.ts": {
"bytes": 1727,
"imports": [
{
"path": "dist/tfjs.esm.js",
@ -43,41 +43,37 @@
}
]
},
"src/blazeface/util.js": {
"bytes": 3087,
"src/blazeface/util.ts": {
"bytes": 2809,
"imports": []
},
"src/blazeface/coords.js": {
"bytes": 37915,
"src/blazeface/coords.ts": {
"bytes": 37783,
"imports": []
},
"src/blazeface/facepipeline.js": {
"bytes": 14028,
"src/blazeface/facepipeline.ts": {
"bytes": 14251,
"imports": [
{
"path": "dist/tfjs.esm.js",
"kind": "import-statement"
},
{
"path": "src/blazeface/box.js",
"path": "src/blazeface/box.ts",
"kind": "import-statement"
},
{
"path": "src/blazeface/util.js",
"path": "src/blazeface/util.ts",
"kind": "import-statement"
},
{
"path": "src/blazeface/coords.js",
"kind": "import-statement"
},
{
"path": "src/log.ts",
"path": "src/blazeface/coords.ts",
"kind": "import-statement"
}
]
},
"src/blazeface/facemesh.js": {
"bytes": 2898,
"src/blazeface/facemesh.ts": {
"bytes": 2899,
"imports": [
{
"path": "src/log.ts",
@ -88,15 +84,15 @@
"kind": "import-statement"
},
{
"path": "src/blazeface/blazeface.js",
"path": "src/blazeface/blazeface.ts",
"kind": "import-statement"
},
{
"path": "src/blazeface/facepipeline.js",
"path": "src/blazeface/facepipeline.ts",
"kind": "import-statement"
},
{
"path": "src/blazeface/coords.js",
"path": "src/blazeface/coords.ts",
"kind": "import-statement"
}
]
@ -111,7 +107,7 @@
]
},
"src/faceboxes/faceboxes.ts": {
"bytes": 2852,
"bytes": 2846,
"imports": [
{
"path": "src/log.ts",
@ -128,7 +124,7 @@
]
},
"src/age/age.ts": {
"bytes": 2064,
"bytes": 1949,
"imports": [
{
"path": "src/log.ts",
@ -145,7 +141,7 @@
]
},
"src/gender/gender.ts": {
"bytes": 2904,
"bytes": 2811,
"imports": [
{
"path": "src/log.ts",
@ -162,7 +158,7 @@
]
},
"src/emotion/emotion.ts": {
"bytes": 3033,
"bytes": 3042,
"imports": [
{
"path": "src/log.ts",
@ -179,7 +175,7 @@
]
},
"src/embedding/embedding.ts": {
"bytes": 1992,
"bytes": 1912,
"imports": [
{
"path": "src/log.ts",
@ -195,8 +191,8 @@
}
]
},
"src/posenet/modelBase.js": {
"bytes": 1343,
"src/posenet/modelBase.ts": {
"bytes": 1333,
"imports": [
{
"path": "dist/tfjs.esm.js",
@ -204,90 +200,90 @@
}
]
},
"src/posenet/heapSort.js": {
"bytes": 1590,
"src/posenet/heapSort.ts": {
"bytes": 1645,
"imports": []
},
"src/posenet/buildParts.js": {
"bytes": 1775,
"src/posenet/buildParts.ts": {
"bytes": 1723,
"imports": [
{
"path": "src/posenet/heapSort.js",
"path": "src/posenet/heapSort.ts",
"kind": "import-statement"
}
]
},
"src/posenet/keypoints.js": {
"bytes": 2011,
"src/posenet/keypoints.ts": {
"bytes": 2025,
"imports": []
},
"src/posenet/vectors.js": {
"bytes": 1273,
"src/posenet/vectors.ts": {
"bytes": 1075,
"imports": [
{
"path": "src/posenet/keypoints.js",
"path": "src/posenet/keypoints.ts",
"kind": "import-statement"
}
]
},
"src/posenet/decoders.js": {
"bytes": 2083,
"src/posenet/decoders.ts": {
"bytes": 1958,
"imports": [
{
"path": "dist/tfjs.esm.js",
"kind": "import-statement"
},
{
"path": "src/posenet/keypoints.js",
"path": "src/posenet/keypoints.ts",
"kind": "import-statement"
}
]
},
"src/posenet/decodePose.js": {
"bytes": 5216,
"src/posenet/decodePose.ts": {
"bytes": 5152,
"imports": [
{
"path": "src/posenet/keypoints.js",
"path": "src/posenet/keypoints.ts",
"kind": "import-statement"
},
{
"path": "src/posenet/vectors.js",
"path": "src/posenet/vectors.ts",
"kind": "import-statement"
},
{
"path": "src/posenet/decoders.js",
"path": "src/posenet/decoders.ts",
"kind": "import-statement"
}
]
},
"src/posenet/decodeMultiple.js": {
"bytes": 2303,
"src/posenet/decodeMultiple.ts": {
"bytes": 2301,
"imports": [
{
"path": "src/posenet/buildParts.js",
"path": "src/posenet/buildParts.ts",
"kind": "import-statement"
},
{
"path": "src/posenet/decodePose.js",
"path": "src/posenet/decodePose.ts",
"kind": "import-statement"
},
{
"path": "src/posenet/vectors.js",
"path": "src/posenet/vectors.ts",
"kind": "import-statement"
}
]
},
"src/posenet/util.js": {
"bytes": 2262,
"src/posenet/util.ts": {
"bytes": 2017,
"imports": [
{
"path": "src/posenet/keypoints.js",
"path": "src/posenet/keypoints.ts",
"kind": "import-statement"
}
]
},
"src/posenet/posenet.js": {
"bytes": 2406,
"src/posenet/posenet.ts": {
"bytes": 2376,
"imports": [
{
"path": "src/log.ts",
@ -298,25 +294,25 @@
"kind": "import-statement"
},
{
"path": "src/posenet/modelBase.js",
"path": "src/posenet/modelBase.ts",
"kind": "import-statement"
},
{
"path": "src/posenet/decodeMultiple.js",
"path": "src/posenet/decodeMultiple.ts",
"kind": "import-statement"
},
{
"path": "src/posenet/decodePose.js",
"path": "src/posenet/decodePose.ts",
"kind": "import-statement"
},
{
"path": "src/posenet/util.js",
"path": "src/posenet/util.ts",
"kind": "import-statement"
}
]
},
"src/handpose/box.js": {
"bytes": 2522,
"src/handpose/box.ts": {
"bytes": 2443,
"imports": [
{
"path": "dist/tfjs.esm.js",
@ -324,50 +320,46 @@
}
]
},
"src/handpose/handdetector.js": {
"bytes": 3548,
"src/handpose/handdetector.ts": {
"bytes": 3701,
"imports": [
{
"path": "dist/tfjs.esm.js",
"kind": "import-statement"
},
{
"path": "src/handpose/box.js",
"path": "src/handpose/box.ts",
"kind": "import-statement"
}
]
},
"src/handpose/util.js": {
"bytes": 2326,
"src/handpose/util.ts": {
"bytes": 2286,
"imports": []
},
"src/handpose/handpipeline.js": {
"bytes": 7243,
"src/handpose/handpipeline.ts": {
"bytes": 7279,
"imports": [
{
"path": "dist/tfjs.esm.js",
"kind": "import-statement"
},
{
"path": "src/handpose/box.js",
"path": "src/handpose/box.ts",
"kind": "import-statement"
},
{
"path": "src/handpose/util.js",
"kind": "import-statement"
},
{
"path": "src/log.ts",
"path": "src/handpose/util.ts",
"kind": "import-statement"
}
]
},
"src/handpose/anchors.js": {
"bytes": 224151,
"src/handpose/anchors.ts": {
"bytes": 224156,
"imports": []
},
"src/handpose/handpose.js": {
"bytes": 2536,
"src/handpose/handpose.ts": {
"bytes": 2565,
"imports": [
{
"path": "src/log.ts",
@ -378,29 +370,29 @@
"kind": "import-statement"
},
{
"path": "src/handpose/handdetector.js",
"path": "src/handpose/handdetector.ts",
"kind": "import-statement"
},
{
"path": "src/handpose/handpipeline.js",
"path": "src/handpose/handpipeline.ts",
"kind": "import-statement"
},
{
"path": "src/handpose/anchors.js",
"path": "src/handpose/anchors.ts",
"kind": "import-statement"
}
]
},
"src/gesture/gesture.ts": {
"bytes": 4265,
"bytes": 4497,
"imports": []
},
"src/imagefx.js": {
"bytes": 19311,
"bytes": 19447,
"imports": []
},
"src/image.ts": {
"bytes": 5851,
"bytes": 5867,
"imports": [
{
"path": "src/log.ts",
@ -425,11 +417,11 @@
"imports": []
},
"package.json": {
"bytes": 2321,
"bytes": 2385,
"imports": []
},
"src/human.ts": {
"bytes": 19233,
"bytes": 19619,
"imports": [
{
"path": "src/log.ts",
@ -444,7 +436,7 @@
"kind": "import-statement"
},
{
"path": "src/blazeface/facemesh.js",
"path": "src/blazeface/facemesh.ts",
"kind": "import-statement"
},
{
@ -468,11 +460,11 @@
"kind": "import-statement"
},
{
"path": "src/posenet/posenet.js",
"path": "src/posenet/posenet.ts",
"kind": "import-statement"
},
{
"path": "src/handpose/handpose.js",
"path": "src/handpose/handpose.ts",
"kind": "import-statement"
},
{
@ -507,77 +499,26 @@
"imports": [],
"exports": [],
"inputs": {},
"bytes": 712944
"bytes": 706125
},
"dist/human.node-gpu.js": {
"imports": [],
"exports": [],
"inputs": {
"dist/tfjs.esm.js": {
"bytesInOutput": 550
"bytesInOutput": 551
},
"src/blazeface/blazeface.js": {
"bytesInOutput": 3077
"src/blazeface/facemesh.ts": {
"bytesInOutput": 1476
},
"src/blazeface/box.js": {
"bytesInOutput": 998
"src/posenet/keypoints.ts": {
"bytesInOutput": 1677
},
"src/blazeface/util.js": {
"bytesInOutput": 1173
},
"src/blazeface/coords.js": {
"bytesInOutput": 30504
},
"src/blazeface/facepipeline.js": {
"bytesInOutput": 5593
},
"src/blazeface/facemesh.js": {
"bytesInOutput": 1574
},
"src/posenet/modelBase.js": {
"bytesInOutput": 688
},
"src/posenet/heapSort.js": {
"bytesInOutput": 1047
},
"src/posenet/buildParts.js": {
"bytesInOutput": 523
},
"src/posenet/keypoints.js": {
"bytesInOutput": 1624
},
"src/posenet/vectors.js": {
"bytesInOutput": 615
},
"src/posenet/decoders.js": {
"bytesInOutput": 925
},
"src/posenet/decodePose.js": {
"bytesInOutput": 1465
},
"src/posenet/decodeMultiple.js": {
"bytesInOutput": 694
},
"src/posenet/util.js": {
"bytesInOutput": 1051
},
"src/posenet/posenet.js": {
"bytesInOutput": 1223
},
"src/handpose/handdetector.js": {
"bytesInOutput": 1844
},
"src/handpose/handpipeline.js": {
"bytesInOutput": 2503
},
"src/handpose/anchors.js": {
"bytesInOutput": 127005
},
"src/handpose/handpose.js": {
"bytesInOutput": 1397
"src/imagefx.js": {
"bytesInOutput": 11012
},
"src/human.ts": {
"bytesInOutput": 10355
"bytesInOutput": 10462
},
"src/log.ts": {
"bytesInOutput": 251
@ -585,38 +526,89 @@
"src/tfjs/backend.ts": {
"bytesInOutput": 1304
},
"src/blazeface/blazeface.ts": {
"bytesInOutput": 2329
},
"src/blazeface/facepipeline.ts": {
"bytesInOutput": 5089
},
"src/blazeface/box.ts": {
"bytesInOutput": 854
},
"src/blazeface/util.ts": {
"bytesInOutput": 848
},
"src/blazeface/coords.ts": {
"bytesInOutput": 28973
},
"src/faceboxes/faceboxes.ts": {
"bytesInOutput": 1592
"bytesInOutput": 1586
},
"src/profile.ts": {
"bytesInOutput": 604
},
"src/age/age.ts": {
"bytesInOutput": 828
"bytesInOutput": 822
},
"src/gender/gender.ts": {
"bytesInOutput": 1337
"bytesInOutput": 1310
},
"src/emotion/emotion.ts": {
"bytesInOutput": 1265
"bytesInOutput": 1247
},
"src/embedding/embedding.ts": {
"bytesInOutput": 851
"bytesInOutput": 769
},
"src/handpose/box.js": {
"src/posenet/posenet.ts": {
"bytesInOutput": 1030
},
"src/posenet/modelBase.ts": {
"bytesInOutput": 656
},
"src/posenet/heapSort.ts": {
"bytesInOutput": 1017
},
"src/posenet/buildParts.ts": {
"bytesInOutput": 454
},
"src/posenet/decodePose.ts": {
"bytesInOutput": 1282
},
"src/posenet/vectors.ts": {
"bytesInOutput": 345
},
"src/posenet/decoders.ts": {
"bytesInOutput": 823
},
"src/posenet/decodeMultiple.ts": {
"bytesInOutput": 553
},
"src/posenet/util.ts": {
"bytesInOutput": 352
},
"src/handpose/handpose.ts": {
"bytesInOutput": 1304
},
"src/handpose/handdetector.ts": {
"bytesInOutput": 1813
},
"src/handpose/box.ts": {
"bytesInOutput": 958
},
"src/handpose/util.js": {
"bytesInOutput": 812
"src/handpose/handpipeline.ts": {
"bytesInOutput": 2470
},
"src/handpose/util.ts": {
"bytesInOutput": 806
},
"src/handpose/anchors.ts": {
"bytesInOutput": 126985
},
"src/gesture/gesture.ts": {
"bytesInOutput": 2391
},
"src/image.ts": {
"bytesInOutput": 2434
},
"src/imagefx.js": {
"bytesInOutput": 11045
"bytesInOutput": 2455
},
"config.js": {
"bytesInOutput": 1426
@ -628,7 +620,7 @@
"bytesInOutput": 16
}
},
"bytes": 279609
"bytes": 274367
}
}
}

190
dist/human.ts vendored

File diff suppressed because one or more lines are too long

4
dist/human.ts.map vendored

File diff suppressed because one or more lines are too long

56
dist/human.tson vendored
View File

@ -9,7 +9,7 @@
"imports": []
},
"src/tfjs/backend.ts": {
"bytes": 2168,
"bytes": 2228,
"imports": [
{
"path": "src/log.ts",
@ -22,7 +22,7 @@
]
},
"src/blazeface/blazeface.ts": {
"bytes": 6024,
"bytes": 6119,
"imports": [
{
"path": "src/log.ts",
@ -44,7 +44,7 @@
]
},
"src/blazeface/util.ts": {
"bytes": 2777,
"bytes": 2809,
"imports": []
},
"src/blazeface/coords.ts": {
@ -52,7 +52,7 @@
"imports": []
},
"src/blazeface/facepipeline.ts": {
"bytes": 14254,
"bytes": 14251,
"imports": [
{
"path": "dist/tfjs.esm.js",
@ -73,7 +73,7 @@
]
},
"src/blazeface/facemesh.ts": {
"bytes": 2935,
"bytes": 2899,
"imports": [
{
"path": "src/log.ts",
@ -107,7 +107,7 @@
]
},
"src/faceboxes/faceboxes.ts": {
"bytes": 2852,
"bytes": 2846,
"imports": [
{
"path": "src/log.ts",
@ -124,7 +124,7 @@
]
},
"src/age/age.ts": {
"bytes": 2064,
"bytes": 1949,
"imports": [
{
"path": "src/log.ts",
@ -141,7 +141,7 @@
]
},
"src/gender/gender.ts": {
"bytes": 2904,
"bytes": 2811,
"imports": [
{
"path": "src/log.ts",
@ -158,7 +158,7 @@
]
},
"src/emotion/emotion.ts": {
"bytes": 3033,
"bytes": 3042,
"imports": [
{
"path": "src/log.ts",
@ -175,7 +175,7 @@
]
},
"src/embedding/embedding.ts": {
"bytes": 1992,
"bytes": 1912,
"imports": [
{
"path": "src/log.ts",
@ -227,7 +227,7 @@
]
},
"src/posenet/decoders.ts": {
"bytes": 1943,
"bytes": 1958,
"imports": [
{
"path": "dist/tfjs.esm.js",
@ -257,7 +257,7 @@
]
},
"src/posenet/decodeMultiple.ts": {
"bytes": 2259,
"bytes": 2301,
"imports": [
{
"path": "src/posenet/buildParts.ts",
@ -321,7 +321,7 @@
]
},
"src/handpose/handdetector.ts": {
"bytes": 3627,
"bytes": 3701,
"imports": [
{
"path": "dist/tfjs.esm.js",
@ -334,11 +334,11 @@
]
},
"src/handpose/util.ts": {
"bytes": 2254,
"bytes": 2286,
"imports": []
},
"src/handpose/handpipeline.ts": {
"bytes": 7344,
"bytes": 7279,
"imports": [
{
"path": "dist/tfjs.esm.js",
@ -359,7 +359,7 @@
"imports": []
},
"src/handpose/handpose.ts": {
"bytes": 2529,
"bytes": 2565,
"imports": [
{
"path": "src/log.ts",
@ -384,7 +384,7 @@
]
},
"src/gesture/gesture.ts": {
"bytes": 4265,
"bytes": 4497,
"imports": []
},
"src/imagefx.js": {
@ -392,7 +392,7 @@
"imports": []
},
"src/image.ts": {
"bytes": 5851,
"bytes": 5867,
"imports": [
{
"path": "src/log.ts",
@ -417,11 +417,11 @@
"imports": []
},
"package.json": {
"bytes": 2344,
"bytes": 2385,
"imports": []
},
"src/human.ts": {
"bytes": 19393,
"bytes": 19619,
"imports": [
{
"path": "src/log.ts",
@ -499,7 +499,7 @@
"imports": [],
"exports": [],
"inputs": {},
"bytes": 1923057
"bytes": 1923340
},
"dist/human.ts": {
"imports": [],
@ -515,7 +515,7 @@
"bytesInOutput": 11016
},
"src/human.ts": {
"bytesInOutput": 10259
"bytesInOutput": 10331
},
"src/log.ts": {
"bytesInOutput": 252
@ -539,7 +539,7 @@
"bytesInOutput": 28983
},
"src/blazeface/facepipeline.ts": {
"bytesInOutput": 5046
"bytesInOutput": 5040
},
"src/faceboxes/faceboxes.ts": {
"bytesInOutput": 1549
@ -548,16 +548,16 @@
"bytesInOutput": 606
},
"src/age/age.ts": {
"bytesInOutput": 826
"bytesInOutput": 775
},
"src/gender/gender.ts": {
"bytesInOutput": 1309
"bytesInOutput": 1237
},
"src/emotion/emotion.ts": {
"bytesInOutput": 1243
"bytesInOutput": 1180
},
"src/embedding/embedding.ts": {
"bytesInOutput": 802
"bytesInOutput": 725
},
"src/posenet/posenet.ts": {
"bytesInOutput": 1007
@ -620,7 +620,7 @@
"bytesInOutput": 16
}
},
"bytes": 1343047
"bytes": 1342850
}
}
}

6
package-lock.json generated
View File

@ -2527,9 +2527,9 @@
"dev": true
},
"typescript": {
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.1.3.tgz",
"integrity": "sha512-B3ZIOf1IKeH2ixgHhj6la6xdwR9QrLC5d1VKeCSY4tvkqhF2eqd9O7txNlS0PO3GrBAFIdr3L1ndNwteUbZLYg==",
"version": "4.2.0-dev.20210208",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.2.0-dev.20210208.tgz",
"integrity": "sha512-zZD4z9MS8iy9dPxwBeEcQDemi13rfdAk7oCyAaiMQXsw1rcdmdsNGjpaQ8OZiY3I5Y/vaoDCu7yBB1ZaNSemLA==",
"dev": true
},
"uri-js": {

View File

@ -46,7 +46,7 @@
"seedrandom": "^3.0.5",
"simple-git": "^2.34.2",
"tslib": "^2.1.0",
"typescript": "^4.1.3"
"typescript": "^4.2.0-dev.20210208"
},
"scripts": {
"start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",

View File

@ -1,8 +1,9 @@
#!/usr/bin/env -S node --trace-warnings
const fs = require('fs');
const esbuild = require('esbuild');
const log = require('@vladmandic/pilogger');
const esbuild = require('esbuild');
const ts = require('typescript');
// keeps esbuild service instance cached
let es;
@ -15,6 +16,29 @@ const banner = `
*/
`;
// tsc configuration for building types only
const tsconfig = {
noEmitOnError: false,
target: ts.ScriptTarget.ES2018,
module: ts.ModuleKind.ES2020,
outDir: 'types/',
declaration: true,
emitDeclarationOnly: true,
emitDecoratorMetadata: true,
experimentalDecorators: true,
skipLibCheck: true,
importHelpers: true,
noImplicitAny: false,
preserveConstEnums: true,
strictNullChecks: true,
baseUrl: './',
typeRoots: ['node_modules/@types'],
paths: {
tslib: ['node_modules/tslib/tslib.d.ts'],
'@tensorflow/tfjs-node/dist/io/file_system': ['node_modules/@tensorflow/tfjs-node/dist/io/file_system.js'],
},
};
// common configuration
const common = {
banner,
@ -29,42 +53,6 @@ const common = {
};
const targets = {
node: {
tfjs: {
platform: 'node',
format: 'cjs',
metafile: 'dist/tfjs.esm.json',
entryPoints: ['src/tfjs/tf-node.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['@tensorflow'],
},
node: {
platform: 'node',
format: 'cjs',
metafile: 'dist/human.node.json',
entryPoints: ['src/human.ts'],
outfile: 'dist/human.node.js',
external: ['@tensorflow'],
},
},
nodeGPU: {
tfjs: {
platform: 'node',
format: 'cjs',
metafile: 'dist/tfjs.esm.json',
entryPoints: ['src/tfjs/tf-node-gpu.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['@tensorflow'],
},
node: {
platform: 'node',
format: 'cjs',
metafile: 'dist/human.node.json',
entryPoints: ['src/human.ts'],
outfile: 'dist/human.node-gpu.js',
external: ['@tensorflow'],
},
},
browserNoBundle: {
tfjs: {
platform: 'browser',
@ -118,6 +106,42 @@ const targets = {
external: ['fs', 'buffer', 'util'],
},
},
node: {
tfjs: {
platform: 'node',
format: 'cjs',
metafile: 'dist/tfjs.esm.json',
entryPoints: ['src/tfjs/tf-node.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['@tensorflow'],
},
node: {
platform: 'node',
format: 'cjs',
metafile: 'dist/human.node.json',
entryPoints: ['src/human.ts'],
outfile: 'dist/human.node.js',
external: ['@tensorflow'],
},
},
nodeGPU: {
tfjs: {
platform: 'node',
format: 'cjs',
metafile: 'dist/tfjs.esm.json',
entryPoints: ['src/tfjs/tf-node-gpu.ts'],
outfile: 'dist/tfjs.esm.js',
external: ['@tensorflow'],
},
node: {
platform: 'node',
format: 'cjs',
metafile: 'dist/human.node.json',
entryPoints: ['src/human.ts'],
outfile: 'dist/human.node-gpu.js',
external: ['@tensorflow'],
},
},
};
async function getStats(metafile) {
@ -147,6 +171,28 @@ async function getStats(metafile) {
return stats;
}
// rebuild typings
function compile(fileNames, options) {
log.info('Compile:', fileNames);
const program = ts.createProgram(fileNames, options);
const emit = program.emit();
const diag = ts
.getPreEmitDiagnostics(program)
.concat(emit.diagnostics);
for (const info of diag) {
// @ts-ignore
const msg = info.messageText.messageText || info.messageText;
if (msg.includes('package.json')) continue;
if (msg.includes('Expected 0 arguments, but got 1')) continue;
if (info.file) {
const pos = info.file.getLineAndCharacterOfPosition(info.start || 0);
log.error(`TSC: ${info.file.fileName} [${pos.line + 1},${pos.character + 1}]:`, msg);
} else {
log.error('TSC:', msg);
}
}
}
// rebuild on file change
async function build(f, msg) {
if (busy) {
@ -169,6 +215,8 @@ async function build(f, msg) {
log.state(`Build for: ${targetGroupName} type: ${targetName}:`, stats);
}
}
// generate typings
compile(targets.browserBundle.esm.entryPoints, tsconfig);
if (require.main === module) process.exit(0);
} catch (err) {
// catch errors and print where it occured

View File

@ -2,20 +2,20 @@ import { log } from '../log';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile.js';
const models = { age: null };
let model;
let last = { age: 0 };
let skipped = Number.MAX_SAFE_INTEGER;
export async function load(config) {
if (!models.age) {
models.age = await tf.loadGraphModel(config.face.age.modelPath);
if (!model) {
model = await tf.loadGraphModel(config.face.age.modelPath);
log(`load model: ${config.face.age.modelPath.match(/\/(.*)\./)[1]}`);
}
return models.age;
return model;
}
export async function predict(image, config) {
if (!models.age) return null;
if (!model) return null;
if ((skipped < config.face.age.skipFrames) && config.videoOptimized && last.age && (last.age > 0)) {
skipped++;
return last;
@ -38,13 +38,12 @@ export async function predict(image, config) {
tf.dispose(resize);
let ageT;
const obj = { age: undefined };
const obj = { age: 0 };
if (models.age) {
if (!config.profile) {
if (config.face.age.enabled) ageT = await models.age.predict(enhance);
if (config.face.age.enabled) ageT = await model.predict(enhance);
} else {
const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
const profileAge = config.face.age.enabled ? await tf.profile(() => model.predict(enhance)) : {};
ageT = profileAge.result.clone();
profileAge.result.dispose();
profile.run('age', profileAge);
@ -58,7 +57,6 @@ export async function predict(image, config) {
ageT.dispose();
last = obj;
}
resolve(obj);
});
}

View File

@ -5,7 +5,7 @@ const NUM_LANDMARKS = 6;
function generateAnchors(inputSize) {
const spec = { strides: [inputSize / 16, inputSize / 8], anchors: [2, 6] };
const anchors = [];
const anchors: Array<[number, number]> = [];
for (let i = 0; i < spec.strides.length; i++) {
const stride = spec.strides[i];
const gridRows = Math.floor((inputSize + stride - 1) / stride);
@ -121,7 +121,7 @@ export class BlazeFaceModel {
});
const scoresVal = scores.dataSync();
const annotatedBoxes = [];
const annotatedBoxes: Array<{ box: any, landmarks: any, anchor: any, confidence: number }> = [];
for (let i = 0; i < boundingBoxes.length; i++) {
const boxIndex = boxIndices[i];
const confidence = scoresVal[boxIndex];

View File

@ -15,7 +15,7 @@ export class MediaPipeFaceMesh {
async estimateFaces(input, config) {
const predictions = await this.facePipeline.predict(input, config);
const results = [];
const results: Array<{}> = [];
for (const prediction of (predictions || [])) {
if (prediction.isDisposedInternal) continue; // guard against disposed tensors on long running operations such as pause in middle of processing
const mesh = prediction.coords ? prediction.coords.arraySync() : null;
@ -33,15 +33,7 @@ export class MediaPipeFaceMesh {
Math.min(input.shape[2], prediction.box.endPoint[0]) - prediction.box.startPoint[0],
Math.min(input.shape[1], prediction.box.endPoint[1]) - prediction.box.startPoint[1],
] : 0;
results.push({
confidence: prediction.confidence || 0,
box,
mesh,
boxRaw,
meshRaw,
annotations,
image: prediction.image ? tf.clone(prediction.image) : null,
});
results.push({ confidence: prediction.confidence || 0, box, mesh, boxRaw, meshRaw, annotations, image: prediction.image ? tf.clone(prediction.image) : null });
if (prediction.coords) prediction.coords.dispose();
if (prediction.image) prediction.image.dispose();
}
@ -51,6 +43,7 @@ export class MediaPipeFaceMesh {
let faceModels = [null, null, null];
export async function load(config) {
// @ts-ignore
faceModels = await Promise.all([
(!faceModels[0] && config.face.enabled) ? blazeface.load(config) : null,
(!faceModels[1] && config.face.mesh.enabled) ? tf.loadGraphModel(config.face.mesh.modelPath, { fromTFHub: config.face.mesh.modelPath.includes('tfhub.dev') }) : null,

View File

@ -26,8 +26,8 @@ function replaceRawCoordinates(rawCoords, newCoords, prefix, keys = null) {
for (let i = 0; i < coords.MESH_TO_IRIS_INDICES_MAP.length; i++) {
const { key, indices } = coords.MESH_TO_IRIS_INDICES_MAP[i];
const originalIndices = coords.MESH_ANNOTATIONS[`${prefix}${key}`];
const shouldReplaceAllKeys = keys === null;
if (shouldReplaceAllKeys || keys.includes(key)) {
// @ts-ignore
if (!keys || keys.includes(key)) {
for (let j = 0; j < indices.length; j++) {
const index = indices[j];
rawCoords[originalIndices[j]] = [
@ -108,15 +108,13 @@ export class Pipeline {
// Given a cropped image of an eye, returns the coordinates of the contours surrounding the eye and the iris.
getEyeCoords(eyeData, eyeBox, eyeBoxSize, flip = false) {
const eyeRawCoords = [];
const eyeRawCoords: Array<any[]> = [];
for (let i = 0; i < IRIS_NUM_COORDINATES; i++) {
const x = eyeData[i * 3];
const y = eyeData[i * 3 + 1];
const z = eyeData[i * 3 + 2];
eyeRawCoords.push([
(flip
? (1 - (x / this.irisSize))
: (x / this.irisSize)) * eyeBoxSize[0] + eyeBox.startPoint[0],
(flip ? (1 - (x / this.irisSize)) : (x / this.irisSize)) * eyeBoxSize[0] + eyeBox.startPoint[0],
(y / this.irisSize) * eyeBoxSize[1] + eyeBox.startPoint[1], z,
]);
}
@ -237,8 +235,10 @@ export class Pipeline {
replaceRawCoordinates(rawCoords, rightEyeRawCoords, 'right');
// If the user is looking to the left or to the right, the iris coordinates tend to diverge too much from the mesh coordinates for them to be merged. So we only update a single contour line above and below the eye.
} else if (leftToRightEyeDepthDifference < 1) { // User is looking towards the right.
// @ts-ignore
replaceRawCoordinates(rawCoords, leftEyeRawCoords, 'left', ['EyeUpper0', 'EyeLower0']);
} else { // User is looking towards the left.
// @ts-ignore
replaceRawCoordinates(rawCoords, rightEyeRawCoords, 'right', ['EyeUpper0', 'EyeLower0']);
}
const adjustedLeftIrisCoords = this.getAdjustedIrisCoords(rawCoords, leftIrisRawCoords, 'left');

View File

@ -34,7 +34,7 @@ export function dot(v1, v2) {
}
export function getColumnFrom2DArr(arr, columnIndex) {
const column = [];
const column: Array<number> = [];
for (let i = 0; i < arr.length; i++) {
column.push(arr[i][columnIndex]);
}
@ -42,7 +42,7 @@ export function getColumnFrom2DArr(arr, columnIndex) {
}
export function multiplyTransformMatrices(mat1, mat2) {
const product = [];
const product: Array<number[]> = [];
const size = mat1.length;
for (let row = 0; row < size; row++) {
product.push([]);

View File

@ -5,14 +5,14 @@ import * as profile from '../profile.js';
// based on https://github.com/sirius-ai/MobileFaceNet_TF
// model converted from https://github.com/sirius-ai/MobileFaceNet_TF/files/3551493/FaceMobileNet192_train_false.zip
const models = { embedding: null };
let model;
export async function load(config) {
if (!models.embedding) {
models.embedding = await tf.loadGraphModel(config.face.embedding.modelPath);
if (!model) {
model = await tf.loadGraphModel(config.face.embedding.modelPath);
log(`load model: ${config.face.embedding.modelPath.match(/\/(.*)\./)[1]}`);
}
return models.embedding;
return model;
}
export function simmilarity(embedding1, embedding2) {
@ -25,18 +25,18 @@ export function simmilarity(embedding1, embedding2) {
}
export async function predict(image, config) {
if (!models.embedding) return null;
if (!model) return null;
return new Promise(async (resolve) => {
const resize = tf.image.resizeBilinear(image, [config.face.embedding.inputSize, config.face.embedding.inputSize], false);
// const normalize = tf.tidy(() => resize.div(127.5).sub(0.5)); // this is -0.5...0.5 ???
let data = [];
let data: Array<[]> = [];
if (config.face.embedding.enabled) {
if (!config.profile) {
const embeddingT = await models.embedding.predict({ img_inputs: resize });
const embeddingT = await model.predict({ img_inputs: resize });
data = [...embeddingT.dataSync()]; // convert object array to standard array
tf.dispose(embeddingT);
} else {
const profileData = await tf.profile(() => models.embedding.predict({ img_inputs: resize }));
const profileData = await tf.profile(() => model.predict({ img_inputs: resize }));
data = [...profileData.result.dataSync()];
profileData.result.dispose();
profile.run('emotion', profileData);

View File

@ -3,8 +3,8 @@ import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile.js';
const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral'];
const models = { emotion: null };
let last = [];
let model;
let last: Array<{ score: number, emotion: string }> = [];
let skipped = Number.MAX_SAFE_INTEGER;
// tuning values
@ -12,15 +12,15 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when
const scale = 1; // score multiplication factor
export async function load(config) {
if (!models.emotion) {
models.emotion = await tf.loadGraphModel(config.face.emotion.modelPath);
if (!model) {
model = await tf.loadGraphModel(config.face.emotion.modelPath);
log(`load model: ${config.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
}
return models.emotion;
return model;
}
export async function predict(image, config) {
if (!models.emotion) return null;
if (!model) return null;
if ((skipped < config.face.emotion.skipFrames) && config.videoOptimized && (last.length > 0)) {
skipped++;
return last;
@ -54,15 +54,15 @@ export async function predict(image, config) {
blueNorm.dispose();
const normalize = tf.tidy(() => grayscale.sub(0.5).mul(2));
grayscale.dispose();
const obj = [];
const obj: Array<{ score: number, emotion: string }> = [];
if (config.face.emotion.enabled) {
let data;
if (!config.profile) {
const emotionT = await models.emotion.predict(normalize);
const emotionT = await model.predict(normalize);
data = emotionT.dataSync();
tf.dispose(emotionT);
} else {
const profileData = await tf.profile(() => models.emotion.predict(normalize));
const profileData = await tf.profile(() => model.predict(normalize));
data = profileData.result.dataSync();
profileData.result.dispose();
profile.run('emotion', profileData);

View File

@ -15,7 +15,7 @@ export class FaceBoxes {
async estimateFaces(input, config) {
if (config) this.config = config;
const results = [];
const results: Array<{ confidence: number, box: any, boxRaw: any, image: any }> = [];
const resizeT = tf.image.resizeBilinear(input, [this.config.face.detector.inputSize, this.config.face.detector.inputSize]);
const castT = resizeT.toInt();
let scores;
@ -51,15 +51,8 @@ export class FaceBoxes {
const resized = tf.image.cropAndResize(input, [crop], [0], [this.config.face.detector.inputSize, this.config.face.detector.inputSize]);
const image = resized.div([255]);
resized.dispose();
results.push({
confidence: scores[i],
box,
boxRaw: this.config.face.mesh.returnRawData ? boxRaw : null,
image,
// mesh,
// meshRaw,
// annotations,
});
results.push({ confidence: scores[i], box, boxRaw: this.config.face.mesh.returnRawData ? boxRaw : null, image, });
// add mesh, meshRaw, annotations,
}
}
return results;

View File

@ -2,7 +2,7 @@ import { log } from '../log';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile.js';
const models = { gender: null };
let model;
let last = { gender: '' };
let skipped = Number.MAX_SAFE_INTEGER;
let alternative = false;
@ -11,16 +11,16 @@ let alternative = false;
const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale
export async function load(config) {
if (!models.gender) {
models.gender = await tf.loadGraphModel(config.face.gender.modelPath);
alternative = models.gender.inputs[0].shape[3] === 1;
if (!model) {
model = await tf.loadGraphModel(config.face.gender.modelPath);
alternative = model.inputs[0].shape[3] === 1;
log(`load model: ${config.face.gender.modelPath.match(/\/(.*)\./)[1]}`);
}
return models.gender;
return model;
}
export async function predict(image, config) {
if (!models.gender) return null;
if (!model) return null;
if ((skipped < config.face.gender.skipFrames) && config.videoOptimized && last.gender !== '') {
skipped++;
return last;
@ -45,12 +45,12 @@ export async function predict(image, config) {
tf.dispose(resize);
let genderT;
const obj = { gender: undefined, confidence: undefined };
const obj = { gender: '', confidence: 0 };
if (!config.profile) {
if (config.face.gender.enabled) genderT = await models.gender.predict(enhance);
if (config.face.gender.enabled) genderT = await model.predict(enhance);
} else {
const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};
const profileGender = config.face.gender.enabled ? await tf.profile(() => model.predict(enhance)) : {};
genderT = profileGender.result.clone();
profileGender.result.dispose();
profile.run('gender', profileGender);

View File

@ -1,6 +1,6 @@
export const body = (res) => {
if (!res) return [];
const gestures = [];
const gestures: Array<{ body: number, gesture: string }> = [];
for (let i = 0; i < res.length; i++) {
// raising hands
const leftWrist = res[i].keypoints.find((a) => (a.part === 'leftWrist'));
@ -20,7 +20,7 @@ export const body = (res) => {
export const face = (res) => {
if (!res) return [];
const gestures = [];
const gestures: Array<{ face: number, gesture: string }> = [];
for (let i = 0; i < res.length; i++) {
if (res[i].mesh && res[i].mesh.length > 0) {
const eyeFacing = res[i].mesh[35][2] - res[i].mesh[263][2];
@ -41,7 +41,7 @@ export const face = (res) => {
export const iris = (res) => {
if (!res) return [];
const gestures = [];
const gestures: Array<{ iris: number, gesture: string }> = [];
for (let i = 0; i < res.length; i++) {
if (!res[i].annotations || !res[i].annotations.leftEyeIris || !res[i].annotations.rightEyeIris) continue;
const sizeXLeft = res[i].annotations.leftEyeIris[3][0] - res[i].annotations.leftEyeIris[1][0];
@ -60,10 +60,11 @@ export const iris = (res) => {
export const hand = (res) => {
if (!res) return [];
const gestures = [];
const gestures: Array<{ hand: number, gesture: string }> = [];
for (let i = 0; i < res.length; i++) {
const fingers = [];
const fingers: Array<{ name: string, position: number }> = [];
for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
// @ts-ignore
if (finger !== 'palmBase') fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
}
if (fingers && fingers.length > 0) {

View File

@ -49,7 +49,7 @@ export class HandDetector {
scoresT.dispose();
filteredT.dispose();
const hands = [];
const hands: Array<{ box: any, palmLandmarks: any, confidence: number }> = [];
for (const index of filtered) {
if (scores[index] >= config.hand.minConfidence) {
const matchingBox = tf.slice(boxes, [index, 0], [1, -1]);
@ -70,7 +70,7 @@ export class HandDetector {
const image = tf.tidy(() => input.resizeBilinear([config.hand.inputSize, config.hand.inputSize]).div(127.5).sub(1));
const predictions = await this.getBoxes(image, config);
image.dispose();
const hands = [];
const hands: Array<{}> = [];
if (!predictions || predictions.length === 0) return hands;
for (const prediction of predictions) {
const boxes = prediction.box.dataSync();

View File

@ -91,7 +91,7 @@ export class HandPipeline {
// for (const possible of boxes) this.storedBoxes.push(possible);
if (this.storedBoxes.length > 0) useFreshBox = true;
}
const hands = [];
const hands: Array<{}> = [];
// log('hand', `skipped: ${this.skipped} max: ${config.hand.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);
// go through working set of boxes
@ -124,10 +124,7 @@ export class HandPipeline {
const result = {
landmarks: coords,
confidence,
box: {
topLeft: nextBoundingBox.startPoint,
bottomRight: nextBoundingBox.endPoint,
},
box: { topLeft: nextBoundingBox.startPoint, bottomRight: nextBoundingBox.endPoint },
};
hands.push(result);
} else {
@ -139,10 +136,7 @@ export class HandPipeline {
const enlarged = box.enlargeBox(box.squarifyBox(currentBox), HAND_BOX_ENLARGE_FACTOR);
const result = {
confidence: currentBox.confidence,
box: {
topLeft: enlarged.startPoint,
bottomRight: enlarged.endPoint,
},
box: { topLeft: enlarged.startPoint, bottomRight: enlarged.endPoint },
};
hands.push(result);
}

View File

@ -29,7 +29,7 @@ export class HandPose {
async estimateHands(input, config) {
const predictions = await this.handPipeline.estimateHands(input, config);
if (!predictions) return [];
const hands = [];
const hands: Array<{ confidence: number, box: any, landmarks: any, annotations: any }> = [];
for (const prediction of predictions) {
const annotations = {};
if (prediction.landmarks) {
@ -43,12 +43,7 @@ export class HandPose {
Math.min(input.shape[2], prediction.box.bottomRight[0]) - prediction.box.topLeft[0],
Math.min(input.shape[1], prediction.box.bottomRight[1]) - prediction.box.topLeft[1],
] : 0;
hands.push({
confidence: prediction.confidence,
box,
landmarks: prediction.landmarks,
annotations,
});
hands.push({ confidence: prediction.confidence, box, landmarks: prediction.landmarks, annotations });
}
return hands;
}

View File

@ -18,7 +18,7 @@ export function dot(v1, v2) {
}
export function getColumnFrom2DArr(arr, columnIndex) {
const column = [];
const column: Array<number> = [];
for (let i = 0; i < arr.length; i++) {
column.push(arr[i][columnIndex]);
}
@ -26,7 +26,7 @@ export function getColumnFrom2DArr(arr, columnIndex) {
}
export function multiplyTransformMatrices(mat1, mat2) {
const product = [];
const product: Array<number[]> = [];
const size = mat1.length;
for (let row = 0; row < size; row++) {
product.push([]);

View File

@ -236,7 +236,7 @@ class Human {
let genderRes;
let emotionRes;
let embeddingRes;
const faceRes = [];
const faceRes: Array<{ confidence: number, box: any, mesh: any, meshRaw: any, boxRaw: any, annotations: any, age: number, gender: string, genderConfidence: number, emotion: string, embedding: any, iris: number }> = [];
this.state = 'run:face';
timeStamp = now();
const faces = await this.models.face?.estimateFaces(input, this.config);
@ -349,8 +349,8 @@ class Human {
this.state = 'image';
this.config = mergeDeep(this.config, userConfig);
const process = image.process(input, this.config);
process.tensor.dispose();
return process.canvas;
process?.tensor?.dispose();
return process?.canvas;
}
// main detect function
@ -445,6 +445,7 @@ class Human {
let gestureRes = [];
if (this.config.gesture.enabled) {
timeStamp = now();
// @ts-ignore
gestureRes = [...gesture.face(faceRes), ...gesture.body(poseRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);
else if (this.perf.gesture) delete this.perf.gesture;
@ -495,8 +496,8 @@ class Human {
canvas.width = size;
canvas.height = size;
const ctx = canvas.getContext('2d');
ctx.drawImage(img, 0, 0);
const data = ctx.getImageData(0, 0, size, size);
ctx?.drawImage(img, 0, 0);
const data = ctx?.getImageData(0, 0, size, size);
this.detect(data, this.config).then((res) => resolve(res));
};
if (src) img.src = src;

View File

@ -1,3 +1,5 @@
// @ts-nocheck
import { log } from './log';
import * as tf from '../dist/tfjs.esm.js';
import * as fxImage from './imagefx';

View File

@ -20,7 +20,7 @@ function getInstanceScore(existingPoses, squaredNmsRadius, instanceKeypoints) {
}
export function decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, config) {
const poses = [];
const poses: Array<{ keypoints: any, score: number }> = [];
const queue = buildParts.buildPartWithScoreQueue(config.body.scoreThreshold, kLocalMaximumRadius, scoresBuffer);
const squaredNmsRadius = config.body.nmsRadius ^ 2;
// Generate at most maxDetections object instances per image in decreasing root part score order.

View File

@ -20,7 +20,7 @@ function getOffsetPoint(y, x, keypoint, offsetsBuffer) {
}
export function getOffsetVectors(heatMapCoordsBuffer, offsetsBuffer) {
const result = [];
const result: Array<number> = [];
for (let keypoint = 0; keypoint < kpt.NUM_KEYPOINTS; keypoint++) {
const heatmapY = heatMapCoordsBuffer.get(keypoint, 0).valueOf();
const heatmapX = heatMapCoordsBuffer.get(keypoint, 1).valueOf();

View File

@ -24,12 +24,14 @@ export function register() {
if (!tf.findBackend(config.name)) {
log('backend registration:', config.name);
try {
// @ts-ignore
config.canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(config.width, config.height) : document.createElement('canvas');
} catch (err) {
log('error: cannot create canvas:', err);
return;
}
try {
// @ts-ignore
config.gl = config.canvas.getContext('webgl2', config.webGLattr);
} catch (err) {
log('error: cannot get WebGL2 context:', err);
@ -60,6 +62,7 @@ export function register() {
}
try {
tf.ENV.set('WEBGL_VERSION', 2);
// @ts-ignore
tf.ENV.set('WEBGL_MAX_TEXTURE_SIZE', config.gl.getParameter(config.gl.MAX_TEXTURE_SIZE));
tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);

View File

@ -17,9 +17,9 @@
"resolveJsonModule": true,
"skipLibCheck": true,
"sourceMap": false,
"strictNullChecks": false,
"strictNullChecks": true,
"allowJs": true
},
"formatCodeOptions": { "indentSize": 2, "tabSize": 2 },
"include": ["src/*", "demo/*"],
"include": ["src/*", "src/***/*", "demo/*"],
}

2
types/age/age.d.ts vendored Normal file
View File

@ -0,0 +1,2 @@
/** Ambient declarations for the age-estimation module. */

/** Loads the age model described by `config`; resolves with the loaded model. */
export declare function load(config: any): Promise<any>;
/** Runs age prediction on `image` using `config`; result shape is implementation-defined. */
export declare function predict(image: any, config: any): Promise<unknown>;

22
types/blazeface/blazeface.d.ts vendored Normal file
View File

@ -0,0 +1,22 @@
/** Ambient declarations for the BlazeFace face detector. */

/** Releases resources held by a detection box (presumably tensor disposal — see implementation). */
export declare const disposeBox: (box: any) => void;
/** Wraps the BlazeFace graph model together with its anchor data and configuration. */
export declare class BlazeFaceModel {
    blazeFaceModel: any;
    width: number;
    height: number;
    anchorsData: any;
    anchors: any;
    inputSize: number;
    config: any;
    scaleFaces: number;
    constructor(model: any, config: any);
    /**
     * Detects face candidates in `inputImage`.
     * Resolves with per-face box/landmarks/anchor/confidence plus a scale factor,
     * or null when detection is not possible.
     */
    getBoundingBoxes(inputImage: any): Promise<{
        boxes: {
            box: any;
            landmarks: any;
            anchor: any;
            confidence: number;
        }[];
        scaleFactor: number[];
    } | null>;
}
/** Loads the BlazeFace model per `config` and returns a ready BlazeFaceModel. */
export declare function load(config: any): Promise<BlazeFaceModel>;

17
types/blazeface/box.d.ts vendored Normal file
View File

@ -0,0 +1,17 @@
/** Ambient declarations for face-box geometry helpers (start/end-point boxes). */

/** Scales a box's start/end points by `factor`. */
export declare function scaleBoxCoordinates(box: any, factor: any): {
    startPoint: number[];
    endPoint: number[];
};
/** Returns [width, height] of a box. */
export declare function getBoxSize(box: any): number[];
/** Returns the center coordinates of a box. */
export declare function getBoxCenter(box: any): any[];
/** Crops the box region out of `image` and resizes it to `cropSize`. */
export declare function cutBoxFromImageAndResize(box: any, image: any, cropSize: any): any;
/** Returns a copy of the box grown around its center by `factor`; landmarks carried over. */
export declare function enlargeBox(box: any, factor?: number): {
    startPoint: number[];
    endPoint: any[];
    landmarks: any;
};
/** Returns a square box covering the input box; landmarks carried over. */
export declare function squarifyBox(box: any): {
    startPoint: number[];
    endPoint: any[];
    landmarks: any;
};

49
types/blazeface/coords.d.ts vendored Normal file
View File

@ -0,0 +1,49 @@
/** Ambient declarations for face-mesh coordinate tables (mesh indices, UV maps, triangulations). */

/** Named groups of mesh vertex indices (lips, eyes, brows, nose, cheeks, …). */
export declare const MESH_ANNOTATIONS: {
    silhouette: number[];
    lipsUpperOuter: number[];
    lipsLowerOuter: number[];
    lipsUpperInner: number[];
    lipsLowerInner: number[];
    rightEyeUpper0: number[];
    rightEyeLower0: number[];
    rightEyeUpper1: number[];
    rightEyeLower1: number[];
    rightEyeUpper2: number[];
    rightEyeLower2: number[];
    rightEyeLower3: number[];
    rightEyebrowUpper: number[];
    rightEyebrowLower: number[];
    rightEyeIris: number[];
    leftEyeUpper0: number[];
    leftEyeLower0: number[];
    leftEyeUpper1: number[];
    leftEyeLower1: number[];
    leftEyeUpper2: number[];
    leftEyeLower2: number[];
    leftEyeLower3: number[];
    leftEyebrowUpper: number[];
    leftEyebrowLower: number[];
    leftEyeIris: number[];
    midwayBetweenEyes: number[];
    noseTip: number[];
    noseBottom: number[];
    noseRightCorner: number[];
    noseLeftCorner: number[];
    rightCheek: number[];
    leftCheek: number[];
};
/** Mapping from mesh keypoint names to iris-model index lists. */
export declare const MESH_TO_IRIS_INDICES_MAP: {
    key: string;
    indices: number[];
}[];
// UV texture coordinates and triangulation index tables at several mesh densities
// (468/68/33/7-point variants); exact semantics defined by the implementation.
export declare const UV468: number[][];
export declare const TRI468: number[];
export declare const TRI68: number[];
export declare const TRI33: number[];
export declare const TRI7: number[];
export declare const VTX68: number[];
export declare const VTX33: number[];
export declare const VTX7: number[];
export declare const UV68: number[][];
export declare const UV33: number[][];
export declare const UV7: number[][];

7
types/blazeface/facemesh.d.ts vendored Normal file
View File

@ -0,0 +1,7 @@
/** Combines BlazeFace detection, mesh, and iris models into one face-mesh estimator. */
export declare class MediaPipeFaceMesh {
    facePipeline: any;
    config: any;
    constructor(blazeFace: any, blazeMeshModel: any, irisModel: any, config: any);
    /** Runs the full face pipeline on `input`; resolves with an array of face results. */
    estimateFaces(input: any, config: any): Promise<{}[]>;
}
/** Loads all required models and returns a ready MediaPipeFaceMesh instance. */
export declare function load(config: any): Promise<MediaPipeFaceMesh>;

36
types/blazeface/facepipeline.d.ts vendored Normal file
View File

@ -0,0 +1,36 @@
/**
 * Face pipeline: chains the bounding-box detector, mesh detector, and iris model,
 * caching boxes across frames (`storedBoxes`, `skipped`) to avoid re-detection.
 */
export declare class Pipeline {
    storedBoxes: any;
    runsWithoutFaceDetector: number;
    boundingBoxDetector: any;
    meshDetector: any;
    irisModel: any;
    meshWidth: number;
    meshHeight: number;
    irisSize: number;
    irisEnlarge: number;
    skipped: number;
    detectedFaces: number;
    constructor(boundingBoxDetector: any, meshDetector: any, irisModel: any, config: any);
    /** Maps raw mesh coordinates back into the original image frame. */
    transformRawCoords(rawCoords: any, box: any, angle: any, rotationMatrix: any): any;
    /** Depth (z) difference between left and right eye landmarks. */
    getLeftToRightEyeDepthDifference(rawCoords: any): number;
    /** Computes the crop box around one eye from its inner/outer corner indices. */
    getEyeBox(rawCoords: any, face: any, eyeInnerCornerIndex: any, eyeOuterCornerIndex: any, flip?: boolean): {
        box: {
            startPoint: number[];
            endPoint: any[];
            landmarks: any;
        };
        boxSize: number[];
        crop: any;
    };
    /** Extracts eye-region and iris coordinates from iris-model output. */
    getEyeCoords(eyeData: any, eyeBox: any, eyeBoxSize: any, flip?: boolean): {
        rawCoords: any[][];
        iris: any[][];
    };
    /** Adjusts raw mesh coords with iris-model results for the given eye `direction`. */
    getAdjustedIrisCoords(rawCoords: any, irisCoords: any, direction: any): any;
    /** Runs the pipeline on `input`; result shape is implementation-defined. */
    predict(input: any, config: any): Promise<any>;
    /** Axis-aligned bounding box of a set of landmarks. */
    calculateLandmarksBoundingBox(landmarks: any): {
        startPoint: number[];
        endPoint: number[];
        landmarks: any;
    };
}

21
types/blazeface/util.d.ts vendored Normal file
View File

@ -0,0 +1,21 @@
/** Ambient declarations for 2D transform-matrix math used by the face pipeline. */

/** 3x3 identity matrix. */
export declare const IDENTITY_MATRIX: number[][];
/**
 * Normalizes the provided angle to the range -pi to pi.
 * @param angle The angle in radians to be normalized.
 */
export declare function normalizeRadians(angle: any): number;
/**
 * Computes the angle of rotation between two anchor points.
 * @param point1 First anchor point
 * @param point2 Second anchor point
 */
export declare function computeRotation(point1: any, point2: any): number;
/** Converts radians to degrees. */
export declare function radToDegrees(rad: any): number;
/** Builds a 3x3 translation matrix for offset (x, y). */
export declare function buildTranslationMatrix(x: any, y: any): any[][];
/** Dot product of two vectors. */
export declare function dot(v1: any, v2: any): number;
/** Extracts column `columnIndex` from a 2D array. */
export declare function getColumnFrom2DArr(arr: any, columnIndex: any): number[];
/** Matrix product mat1 * mat2 (square matrices). */
export declare function multiplyTransformMatrices(mat1: any, mat2: any): number[][];
/** Rotation matrix for `rotation` radians about `center`. */
export declare function buildRotationMatrix(rotation: any, center: any): number[][];
/** Inverse of an affine transform matrix. */
export declare function invertTransformMatrix(matrix: any): any[][];
/** Applies `rotationMatrix` to a homogeneous coordinate. */
export declare function rotatePoint(homogeneousCoordinate: any, rotationMatrix: any): number[];
/** Euclidean distance between two points in the XY plane. */
export declare function xyDistanceBetweenPoints(a: any, b: any): number;

3
types/embedding/embedding.d.ts vendored Normal file
View File

@ -0,0 +1,3 @@
/** Ambient declarations for the face-embedding (descriptor) module. */

/** Loads the embedding model per `config`. */
export declare function load(config: any): Promise<any>;
/** Similarity score between two embedding vectors (name spelled as in implementation). */
export declare function simmilarity(embedding1: any, embedding2: any): number;
/** Computes the embedding vector for `image`. */
export declare function predict(image: any, config: any): Promise<unknown>;

2
types/emotion/emotion.d.ts vendored Normal file
View File

@ -0,0 +1,2 @@
/** Ambient declarations for the emotion-classification module. */

/** Loads the emotion model per `config`. */
export declare function load(config: any): Promise<any>;
/** Classifies emotion for `image`; result shape is implementation-defined. */
export declare function predict(image: any, config: any): Promise<unknown>;

13
types/faceboxes/faceboxes.d.ts vendored Normal file
View File

@ -0,0 +1,13 @@
/** Alternative face detector backed by the FaceBoxes model. */
export declare class FaceBoxes {
    enlarge: number;
    model: any;
    config: any;
    constructor(model: any, config: any);
    /** Detects faces in `input`; resolves with confidence, box (raw and scaled), and cropped image per face. */
    estimateFaces(input: any, config: any): Promise<{
        confidence: number;
        box: any;
        boxRaw: any;
        image: any;
    }[]>;
}
/** Loads the FaceBoxes model and returns a ready instance. */
export declare function load(config: any): Promise<FaceBoxes>;

2
types/gender/gender.d.ts vendored Normal file
View File

@ -0,0 +1,2 @@
/** Ambient declarations for the gender-classification module. */

/** Loads the gender model per `config`. */
export declare function load(config: any): Promise<any>;
/** Classifies gender for `image`; result shape is implementation-defined. */
export declare function predict(image: any, config: any): Promise<unknown>;

16
types/gesture/gesture.d.ts vendored Normal file
View File

@ -0,0 +1,16 @@
/** Ambient declarations for gesture analyzers; each maps detection results to gesture labels. */

/** Body gestures derived from pose results; `body` is the index of the source pose. */
export declare const body: (res: any) => {
    body: number;
    gesture: string;
}[];
/** Face gestures derived from face-mesh results; `face` is the index of the source face. */
export declare const face: (res: any) => {
    face: number;
    gesture: string;
}[];
/** Iris/gaze gestures derived from face results; `iris` is the index of the source face. */
export declare const iris: (res: any) => {
    iris: number;
    gesture: string;
}[];
/** Hand gestures derived from hand results; `hand` is the index of the source hand. */
export declare const hand: (res: any) => {
    hand: number;
    gesture: string;
}[];

6
types/handpose/anchors.d.ts vendored Normal file
View File

@ -0,0 +1,6 @@
/** Precomputed hand-detector anchor boxes (width, height, center x/y). */
export declare const anchors: {
    w: number;
    h: number;
    x_center: number;
    y_center: number;
}[];

24
types/handpose/box.d.ts vendored Normal file
View File

@ -0,0 +1,24 @@
/** Ambient declarations for hand-box geometry helpers (boxes carry palmLandmarks). */

/** Returns [width, height] of a box. */
export declare function getBoxSize(box: any): number[];
/** Returns the center coordinates of a box. */
export declare function getBoxCenter(box: any): any[];
/** Crops the box region out of `image` and resizes it to `cropSize`. */
export declare function cutBoxFromImageAndResize(box: any, image: any, cropSize: any): any;
/** Scales a box (and its palm landmarks) by `factor`. */
export declare function scaleBoxCoordinates(box: any, factor: any): {
    startPoint: number[];
    endPoint: number[];
    palmLandmarks: any;
    confidence: any;
};
/** Returns a copy of the box grown around its center by `factor`. */
export declare function enlargeBox(box: any, factor?: number): {
    startPoint: number[];
    endPoint: any[];
    palmLandmarks: any;
};
/** Returns a square box covering the input box. */
export declare function squarifyBox(box: any): {
    startPoint: number[];
    endPoint: any[];
    palmLandmarks: any;
};
/** Shifts the box by `shiftFactor` (relative offset — see implementation). */
export declare function shiftBox(box: any, shiftFactor: any): {
    startPoint: any[];
    endPoint: any[];
    palmLandmarks: any;
};

16
types/handpose/handdetector.d.ts vendored Normal file
View File

@ -0,0 +1,16 @@
/** Palm/hand detector: runs the detection model over anchor boxes to find hand candidates. */
export declare class HandDetector {
    model: any;
    anchors: any;
    anchorsTensor: any;
    inputSizeTensor: any;
    doubleInputSizeTensor: any;
    constructor(model: any, inputSize: any, anchorsAnnotated: any);
    /** Normalizes raw box coordinates against the model input size. */
    normalizeBoxes(boxes: any): any;
    /** Normalizes raw palm landmarks for anchor `index`. */
    normalizeLandmarks(rawPalmLandmarks: any, index: any): any;
    /** Runs detection on a preprocessed `input`; resolves with candidate boxes. */
    getBoxes(input: any, config: any): Promise<{
        box: any;
        palmLandmarks: any;
        confidence: number;
    }[]>;
    /** Full-image hand bounds estimation (resizes input internally). */
    estimateHandBounds(input: any, config: any): Promise<{}[]>;
}

25
types/handpose/handpipeline.d.ts vendored Normal file
View File

@ -0,0 +1,25 @@
/**
 * Hand pipeline: chains the palm detector and landmark model,
 * caching boxes across frames (`storedBoxes`, `skipped`) to avoid re-detection.
 */
export declare class HandPipeline {
    handDetector: any;
    landmarkDetector: any;
    inputSize: number;
    storedBoxes: any;
    skipped: number;
    detectedHands: number;
    constructor(handDetector: any, landmarkDetector: any, inputSize: any);
    /** Derives a crop box from palm landmarks under the given rotation. */
    getBoxForPalmLandmarks(palmLandmarks: any, rotationMatrix: any): {
        startPoint: number[];
        endPoint: any[];
        palmLandmarks: any;
    };
    /** Derives a crop box from full-hand landmarks. */
    getBoxForHandLandmarks(landmarks: any): {
        startPoint: number[];
        endPoint: any[];
        palmLandmarks: any;
    };
    /** Maps raw landmark coordinates back into the original image frame. */
    transformRawCoords(rawCoords: any, box2: any, angle: any, rotationMatrix: any): any;
    /** Runs the pipeline on `image`; resolves with per-hand results. */
    estimateHands(image: any, config: any): Promise<{}[]>;
    /** Axis-aligned bounding box of a set of landmarks. */
    calculateLandmarksBoundingBox(landmarks: any): {
        startPoint: number[];
        endPoint: number[];
    };
}

19
types/handpose/handpose.d.ts vendored Normal file
View File

@ -0,0 +1,19 @@
/** Public hand-pose API wrapping a HandPipeline instance. */
export declare class HandPose {
    handPipeline: any;
    constructor(handPipeline: any);
    /** Landmark index groups for each finger and the palm base. */
    static getAnnotations(): {
        thumb: number[];
        indexFinger: number[];
        middleFinger: number[];
        ringFinger: number[];
        pinky: number[];
        palmBase: number[];
    };
    /** Detects hands in `input`; resolves with confidence, box, landmarks, and annotations per hand. */
    estimateHands(input: any, config: any): Promise<{
        confidence: number;
        box: any;
        landmarks: any;
        annotations: any;
    }[]>;
}
/** Loads the hand models and returns a ready HandPose instance. */
export declare function load(config: any): Promise<HandPose>;

9
types/handpose/util.d.ts vendored Normal file
View File

@ -0,0 +1,9 @@
/** Ambient declarations for hand-pipeline transform-matrix math. */

/** Normalizes an angle (radians) to the range -pi..pi. */
export declare function normalizeRadians(angle: any): number;
/** Angle of rotation between two anchor points. */
export declare function computeRotation(point1: any, point2: any): number;
/** Builds a 3x3 translation matrix for offset (x, y). */
export declare const buildTranslationMatrix: (x: any, y: any) => any[][];
/** Dot product of two vectors. */
export declare function dot(v1: any, v2: any): number;
/** Extracts column `columnIndex` from a 2D array. */
export declare function getColumnFrom2DArr(arr: any, columnIndex: any): number[];
/** Matrix product mat1 * mat2 (square matrices). */
export declare function multiplyTransformMatrices(mat1: any, mat2: any): number[][];
/** Rotation matrix for `rotation` radians about `center`. */
export declare function buildRotationMatrix(rotation: any, center: any): number[][];
/** Inverse of an affine transform matrix. */
export declare function invertTransformMatrix(matrix: any): any[][];
/** Applies `rotationMatrix` to a homogeneous coordinate. */
export declare function rotatePoint(homogeneousCoordinate: any, rotationMatrix: any): number[];

47
types/human.d.ts vendored Normal file
View File

@ -0,0 +1,47 @@
/**
 * Main library entry point: owns all sub-models (face, age, gender, emotion,
 * body, hand), the tfjs backend, configuration, and performance counters.
 */
declare class Human {
    tf: any;
    version: string;
    config: any;
    fx: any;
    state: string;
    numTensors: number;
    analyzeMemoryLeaks: boolean;
    checkSanity: boolean;
    firstRun: boolean;
    perf: any;
    models: any;
    facemesh: any;
    age: any;
    gender: any;
    emotion: any;
    body: any;
    hand: any;
    constructor(userConfig?: {});
    /** Returns collected profiling data. */
    profile(): {};
    /** Internal diagnostics logger (memory-leak analysis). */
    analyze(...msg: any[]): void;
    /** Validates an input tensor/backend state; returns an error string or null when OK. */
    sanity(input: any): "input is not defined" | "input must be a tensor" | "backend not loaded" | null;
    /** Similarity score between two face embeddings (delegates to the embedding module). */
    simmilarity(embedding1: any, embedding2: any): number;
    /** Loads all enabled models per configuration. */
    load(userConfig?: null): Promise<void>;
    /** Initializes or switches the tfjs backend; `force` re-initializes. */
    checkBackend(force?: boolean): Promise<void>;
    /** Runs the face pipeline plus per-face age/gender/emotion/embedding analysis. */
    detectFace(input: any): Promise<{
        confidence: number;
        box: any;
        mesh: any;
        meshRaw: any;
        boxRaw: any;
        annotations: any;
        age: number;
        gender: string;
        genderConfidence: number;
        emotion: string;
        embedding: any;
        iris: number;
    }[]>;
    /** Preprocesses `input` into a canvas (no detection). */
    image(input: any, userConfig?: {}): Promise<null | undefined>;
    /** Main detection entry point; resolves with combined results for all enabled models. */
    detect(input: any, userConfig?: {}): Promise<unknown>;
    // Warmup variants for different runtimes (bitmap/canvas in browser, node buffer).
    warmupBitmap(): Promise<any>;
    warmupCanvas(): Promise<unknown>;
    warmupNode(): Promise<unknown>;
    /** Runs a warmup inference pass appropriate to the current environment. */
    warmup(userConfig: any): Promise<any>;
}
export { Human as default };

4
types/image.d.ts vendored Normal file
View File

@ -0,0 +1,4 @@
/** Preprocesses an input into a tensor plus optional canvas; null when input is unusable. */
export declare function process(input: any, config: any): {
    tensor: any;
    canvas: null;
} | null;

1
types/log.d.ts vendored Normal file
View File

@ -0,0 +1 @@
/** Timestamped console logger used throughout the library. */
export declare function log(...msg: any[]): void;

2
types/posenet/buildParts.d.ts vendored Normal file
View File

@ -0,0 +1,2 @@
import * as heapSort from './heapSort';
/** Builds a max-heap of keypoint candidates above `scoreThreshold` that are local maxima within `localMaximumRadius`. */
export declare function buildPartWithScoreQueue(scoreThreshold: any, localMaximumRadius: any, scores: any): heapSort.MaxHeap;

4
types/posenet/decodeMultiple.d.ts vendored Normal file
View File

@ -0,0 +1,4 @@
/** Decodes multiple poses from PoseNet output buffers; returns keypoints and score per pose. */
export declare function decodeMultiplePoses(scoresBuffer: any, offsetsBuffer: any, displacementsFwdBuffer: any, displacementsBwdBuffer: any, config: any): {
    keypoints: any;
    score: number;
}[];

12
types/posenet/decodePose.d.ts vendored Normal file
View File

@ -0,0 +1,12 @@
/** Decodes a full pose from a root keypoint by following the part chain via displacement maps. */
export declare function decodePose(root: any, scores: any, offsets: any, outputStride: any, displacementsFwd: any, displacementsBwd: any): any[];
/** Decodes the single highest-scoring pose from heatmap scores and offsets. */
export declare function decodeSinglePose(heatmapScores: any, offsets: any, config: any): Promise<{
    keypoints: {
        position: {
            y: any;
            x: any;
        };
        part: string;
        score: number;
    }[];
    score: number;
}>;

4
types/posenet/decoders.d.ts vendored Normal file
View File

@ -0,0 +1,4 @@
/** Ambient declarations for PoseNet output-decoding helpers. */

/** Confidence score per keypoint read from the heatmap at each coordinate. */
export declare function getPointsConfidence(heatmapScores: any, heatMapCoords: any): Float32Array;
/** Offset vectors for each keypoint from the offsets buffer. */
export declare function getOffsetVectors(heatMapCoordsBuffer: any, offsetsBuffer: any): any;
/** Keypoint image coordinates: heatmap coords scaled by `outputStride` plus offsets. */
export declare function getOffsetPoints(heatMapCoordsBuffer: any, outputStride: any, offsetsBuffer: any): any;
/** 2D argmax over heatmap inputs. */
export declare function argmax2d(inputs: any): any;

17
types/posenet/heapSort.d.ts vendored Normal file
View File

@ -0,0 +1,17 @@
/** Array-backed max-heap priority queue keyed by `getElementValue(element)`. */
export declare class MaxHeap {
    priorityQueue: any;
    numberOfElements: number;
    getElementValue: any;
    constructor(maxSize: any, getElementValue: any);
    /** Inserts an element. */
    enqueue(x: any): void;
    /** Removes and returns the maximum element. */
    dequeue(): any;
    empty(): boolean;
    size(): number;
    /** All stored elements. */
    all(): any;
    /** Maximum element without removing it. */
    max(): any;
    // swim/sink restore the heap invariant after insert/remove.
    swim(k: any): void;
    sink(k: any): void;
    getValueAt(i: any): any;
    less(i: any, j: any): boolean;
    exchange(i: any, j: any): void;
}

6
types/posenet/keypoints.d.ts vendored Normal file
View File

@ -0,0 +1,6 @@
/** PoseNet keypoint metadata: part names, id maps, skeleton connectivity. */
export declare const partNames: string[];
export declare const NUM_KEYPOINTS: any;
export declare const partIds: any;
export declare const connectedPartIndices: any[][];
export declare const poseChain: string[][];
export declare const partChannels: string[];

6
types/posenet/modelBase.d.ts vendored Normal file
View File

@ -0,0 +1,6 @@
/** Thin wrapper around the underlying PoseNet graph model. */
export declare class BaseModel {
    model: any;
    constructor(model: any);
    /** Runs the model on `input`; output shape is implementation-defined. */
    predict(input: any, config: any): any;
    /** Releases the underlying model resources. */
    dispose(): void;
}

7
types/posenet/posenet.d.ts vendored Normal file
View File

@ -0,0 +1,7 @@
/** Public PoseNet API wrapping a BaseModel. */
export declare class PoseNet {
    baseModel: any;
    constructor(model: any);
    /** Estimates body poses in `input`. */
    estimatePoses(input: any, config: any): Promise<unknown>;
    /** Releases the underlying model resources. */
    dispose(): void;
}
/** Loads the PoseNet model and returns a ready instance. */
export declare function load(config: any): Promise<PoseNet>;

14
types/posenet/util.d.ts vendored Normal file
View File

@ -0,0 +1,14 @@
/** Ambient declarations for PoseNet post-processing helpers. */

/** True when either keypoint score is below `minConfidence`. */
export declare function eitherPointDoesntMeetConfidence(a: any, b: any, minConfidence: any): boolean;
/** Pairs of adjacent keypoints where both meet `minConfidence`. */
export declare function getAdjacentKeyPoints(keypoints: any, minConfidence: any): any[];
/** Bounding box covering all keypoints. */
export declare function getBoundingBox(keypoints: any): any;
/** Bounding box expressed as corner points. */
export declare function getBoundingBoxPoints(keypoints: any): {
    x: any;
    y: any;
}[];
/** Converts model output tensors to buffers (fixed 10-tuple — see implementation). */
export declare function toTensorBuffers3D(tensors: any): Promise<[unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown]>;
/** Scales a pose's keypoints by (scaleY, scaleX). */
export declare function scalePose(pose: any, scaleY: any, scaleX: any): {
    score: any;
    keypoints: any;
};
/** Resizes an image to (targetH, targetW). */
export declare function resizeTo(image: any, [targetH, targetW]: [any, any]): any;
/** Scales poses from model resolution back to input resolution (and flips if needed). */
export declare function scaleAndFlipPoses(poses: any, [height, width]: [any, any], [inputResolutionHeight, inputResolutionWidth]: [any, any]): any;

19
types/posenet/vectors.d.ts vendored Normal file
View File

@ -0,0 +1,19 @@
/** Ambient declarations for PoseNet vector/coordinate helpers. */

/** Offset vector for `keypoint` at heatmap position (y, x). */
export declare function getOffsetPoint(y: any, x: any, keypoint: any, offsets: any): {
    y: any;
    x: any;
};
/** Image coordinates for a part: heatmap position * outputStride + offset. */
export declare function getImageCoords(part: any, outputStride: any, offsets: any): {
    x: any;
    y: any;
};
/** Array of `size` copies of `element`. */
export declare function fillArray(element: any, size: any): any[];
/** Clamps `a` to [min, max]. */
export declare function clamp(a: any, min: any, max: any): any;
/** Squared Euclidean distance between (y1,x1) and (y2,x2). */
export declare function squaredDistance(y1: any, x1: any, y2: any, x2: any): number;
/** Component-wise vector sum. */
export declare function addVectors(a: any, b: any): {
    x: any;
    y: any;
};
/** Component-wise clamp of a vector to [min, max]. */
export declare function clampVector(a: any, min: any, max: any): {
    y: any;
    x: any;
};

2
types/profile.d.ts vendored Normal file
View File

@ -0,0 +1,2 @@
/** Accumulated profiling data, keyed by model name. */
export declare const data: {};
/** Records raw tf.profile() output under `name`. */
export declare function run(name: any, raw: any): void;

2
types/sample.d.ts vendored Normal file

File diff suppressed because one or more lines are too long

19
types/tfjs/backend.d.ts vendored Normal file
View File

@ -0,0 +1,19 @@
/** Configuration for the custom WebGL2 tfjs backend (canvas, GL context, context attributes). */
export declare const config: {
    name: string;
    priority: number;
    canvas: null;
    gl: null;
    width: number;
    height: number;
    // WebGL context attributes passed to getContext('webgl2', ...)
    webGLattr: {
        alpha: boolean;
        antialias: boolean;
        premultipliedAlpha: boolean;
        preserveDrawingBuffer: boolean;
        depth: boolean;
        stencil: boolean;
        failIfMajorPerformanceCaveat: boolean;
        desynchronized: boolean;
    };
};
/** Registers the custom backend with tfjs (creates canvas and WebGL2 context). */
export declare function register(): void;

2
wiki

@ -1 +1 @@
Subproject commit d88043901df1d671a0897275bae8d0f537b84ff5
Subproject commit d365c4e487508181811ea7ff1a26d682ef6896b7