stricter linting, fix face annotations

pull/70/head
Vladimir Mandic 2020-12-27 08:12:22 -05:00
parent 90a121a362
commit cdd13e9fd9
42 changed files with 258 additions and 325 deletions

View File

@@ -252,7 +252,6 @@ function webWorker(input, image, canvas, timestamp) {
   // create new webworker and add event handler only once
   log('creating worker thread');
   worker = new Worker(ui.worker, { type: 'module' });
-  console.log('worker', worker);
   // after receiving message from webworker, parse&draw results and send new frame for processing
   worker.addEventListener('message', (msg) => {
     if (msg.data.result.performance && msg.data.result.performance.total) ui.detectFPS.push(1000 / msg.data.result.performance.total);
@@ -270,7 +269,7 @@ function webWorker(input, image, canvas, timestamp) {
    });
  }
  // pass image data as arraybuffer to worker by reference to avoid copy
-  worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, userConfig }, [image.data.buffer]);
+  worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, userConfig }, 'Human', [image.data.buffer]);
}
// main processing function when input is webcam, can use direct invocation or web worker
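The postMessage call above relies on transferable objects: naming the frame's ArrayBuffer in the transfer list moves it to the worker instead of structured-cloning it. A minimal sketch of that hand-off, assuming a hypothetical worker.js module:

  // zero-copy frame hand-off to a worker (illustrative, not the demo's code)
  const worker = new Worker('worker.js', { type: 'module' });
  const pixels = new Uint8ClampedArray(640 * 480 * 4); // one RGBA frame
  worker.postMessage({ image: pixels.buffer, width: 640, height: 480 }, [pixels.buffer]);
  // after the transfer the sender's copy of the buffer is detached:
  console.log(pixels.buffer.byteLength); // 0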

View File

@@ -1,3 +1,4 @@
+// @ts-nocheck
 /* eslint-disable max-len */
 // based on: https://github.com/munrocket/gl-bench

View File

@@ -1,3 +1,5 @@
+// @ts-nocheck
+
 let instance = 0;
 let CSScreated = false;
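The // @ts-nocheck added at the top of these demo helpers disables type checking for the whole file, while later hunks in this commit use per-line // @ts-ignore; both directives only take effect when JavaScript files are type-checked (for example with checkJs enabled). A small sketch of the difference:

  // file-level: a leading // @ts-nocheck would silence every error in this file
  function square(n) { return n * n; }
  // line-level: @ts-ignore suppresses only the error on the next line
  // @ts-ignore
  const out = square('3'); // would otherwise be flagged under checkJs
  console.log(out);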

View File

@@ -24,6 +24,6 @@ onmessage = async (msg) => {
   }
   // must strip canvas from return value as it cannot be transfered from worker thread
   if (result.canvas) result.canvas = null;
-  postMessage({ result });
+  postMessage({ result }, 'Human');
   busy = false;
 };
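The worker guards against re-entrancy with a busy flag and strips non-cloneable fields before replying. A minimal sketch of that request/reply shape, with a hypothetical detect() standing in for the actual Human pipeline:

  // worker.js -- sketch of the demo's message protocol (detect() is hypothetical)
  let busy = false;
  onmessage = async (msg) => {
    if (busy) return; // drop frames that arrive while detection is in flight
    busy = true;
    const result = await detect(msg.data.image, msg.data.width, msg.data.height);
    if (result.canvas) result.canvas = null; // canvases cannot be structured-cloned
    postMessage({ result });
    busy = false;
  };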

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,7 +1,7 @@
 {
   "inputs": {
     "dist/human.esm.js": {
-      "bytes": 1839826,
+      "bytes": 1838836,
       "imports": []
     },
     "demo/draw.js": {
@@ -9,15 +9,15 @@
       "imports": []
     },
     "demo/menu.js": {
-      "bytes": 13858,
+      "bytes": 13874,
       "imports": []
     },
     "demo/gl-bench.js": {
-      "bytes": 10782,
+      "bytes": 10797,
       "imports": []
     },
     "demo/browser.js": {
-      "bytes": 25480,
+      "bytes": 25454,
       "imports": [
         {
           "path": "dist/human.esm.js"
@@ -38,14 +38,14 @@
     "dist/demo-browser-index.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 1870322
+      "bytes": 1868174
     },
     "dist/demo-browser-index.js": {
       "imports": [],
       "exports": [],
       "inputs": {
         "dist/human.esm.js": {
-          "bytesInOutput": 1832586
+          "bytesInOutput": 1832304
         },
         "demo/draw.js": {
           "bytesInOutput": 7726
@@ -57,10 +57,10 @@
           "bytesInOutput": 7382
         },
         "demo/browser.js": {
-          "bytesInOutput": 19563
+          "bytesInOutput": 19542
         }
       },
-      "bytes": 1886446
+      "bytes": 1885439
     }
   }
 }
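These .json files follow esbuild's metafile layout (inputs/outputs with per-file bytes and bytesInOutput), which is why every byte count shifts whenever a source file changes. A hedged sketch of generating one, assuming a recent esbuild where build() returns result.metafile (older versions differ):

  // emit bundle metadata like dist/human.esm.json (assumes esbuild >= 0.9)
  const esbuild = require('esbuild');
  const fs = require('fs');

  esbuild.build({
    entryPoints: ['src/human.js'],
    bundle: true,
    format: 'esm',
    outfile: 'dist/human.esm.js',
    metafile: true, // record per-input byte accounting
  }).then((result) => {
    fs.writeFileSync('dist/human.esm.json', JSON.stringify(result.metafile, null, 2));
  });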

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

22  dist/human.esm.js (vendored)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

54  dist/human.esm.json (vendored)
View File

@@ -9,7 +9,7 @@
       "imports": []
     },
     "src/tfjs/backend.js": {
-      "bytes": 1340,
+      "bytes": 1376,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -17,7 +17,7 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 6986,
+      "bytes": 7024,
       "imports": [
         {
           "path": "src/log.js"
@@ -44,7 +44,7 @@
       "imports": []
     },
     "src/face/facepipeline.js": {
-      "bytes": 14062,
+      "bytes": 14306,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -64,7 +64,7 @@
       ]
     },
     "src/face/facemesh.js": {
-      "bytes": 3054,
+      "bytes": 2991,
       "imports": [
         {
           "path": "src/log.js"
@@ -92,7 +92,7 @@
       ]
     },
     "src/age/age.js": {
-      "bytes": 2017,
+      "bytes": 2037,
       "imports": [
         {
           "path": "src/log.js"
@@ -106,7 +106,7 @@
       ]
     },
     "src/gender/gender.js": {
-      "bytes": 2886,
+      "bytes": 2906,
       "imports": [
         {
           "path": "src/log.js"
@@ -120,7 +120,7 @@
       ]
     },
     "src/emotion/emotion.js": {
-      "bytes": 3055,
+      "bytes": 3077,
       "imports": [
         {
           "path": "src/log.js"
@@ -134,7 +134,7 @@
       ]
     },
     "src/embedding/embedding.js": {
-      "bytes": 2041,
+      "bytes": 2063,
       "imports": [
         {
           "path": "src/log.js"
@@ -191,7 +191,7 @@
       ]
     },
     "src/body/decodePose.js": {
-      "bytes": 5216,
+      "bytes": 5368,
       "imports": [
         {
           "path": "src/body/keypoints.js"
@@ -205,7 +205,7 @@
       ]
     },
     "src/body/decodeMultiple.js": {
-      "bytes": 2303,
+      "bytes": 2373,
       "imports": [
         {
           "path": "src/body/buildParts.js"
@@ -227,7 +227,7 @@
       ]
     },
     "src/body/modelPoseNet.js": {
-      "bytes": 2395,
+      "bytes": 2519,
       "imports": [
         {
           "path": "src/log.js"
@@ -250,7 +250,7 @@
       ]
     },
     "src/body/posenet.js": {
-      "bytes": 614,
+      "bytes": 712,
       "imports": [
         {
           "path": "src/body/modelPoseNet.js"
@@ -264,7 +264,7 @@
       ]
     },
     "src/hand/box.js": {
-      "bytes": 3226,
+      "bytes": 2522,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -272,7 +272,7 @@
       ]
     },
     "src/hand/handdetector.js": {
-      "bytes": 4253,
+      "bytes": 3548,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -283,11 +283,11 @@
       ]
     },
     "src/hand/util.js": {
-      "bytes": 3030,
+      "bytes": 2346,
       "imports": []
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7951,
+      "bytes": 7246,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -308,7 +308,7 @@
       "imports": []
     },
     "src/hand/handpose.js": {
-      "bytes": 3250,
+      "bytes": 2578,
       "imports": [
         {
           "path": "src/log.js"
@@ -332,11 +332,11 @@
       "imports": []
     },
     "src/imagefx.js": {
-      "bytes": 19352,
+      "bytes": 19445,
       "imports": []
     },
     "src/image.js": {
-      "bytes": 5841,
+      "bytes": 5871,
       "imports": [
         {
           "path": "src/log.js"
@@ -362,7 +362,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 16727,
+      "bytes": 17375,
       "imports": [
         {
           "path": "src/log.js"
@@ -419,7 +419,7 @@
     "dist/human.esm.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 1776771
+      "bytes": 1774476
     },
     "dist/human.esm.js": {
       "imports": [],
@@ -443,7 +443,7 @@
           "bytesInOutput": 9513
         },
         "src/face/facemesh.js": {
-          "bytesInOutput": 2378
+          "bytesInOutput": 2317
         },
         "src/profile.js": {
           "bytesInOutput": 846
@@ -503,16 +503,16 @@
           "bytesInOutput": 127032
         },
         "src/hand/handpose.js": {
-          "bytesInOutput": 2022
+          "bytesInOutput": 2018
         },
         "src/gesture/gesture.js": {
           "bytesInOutput": 2463
         },
         "src/imagefx.js": {
-          "bytesInOutput": 13576
+          "bytesInOutput": 13628
         },
         "src/image.js": {
-          "bytesInOutput": 4041
+          "bytesInOutput": 3637
         },
         "src/log.js": {
           "bytesInOutput": 266
@@ -524,7 +524,7 @@
           "bytesInOutput": 938
         },
         "src/human.js": {
-          "bytesInOutput": 11039
+          "bytesInOutput": 11170
         },
         "src/hand/box.js": {
           "bytesInOutput": 1473
@@ -542,7 +542,7 @@
           "bytesInOutput": 22
         }
       },
-      "bytes": 1839826
+      "bytes": 1838836
     }
   }
 }

22  dist/human.js (vendored)

File diff suppressed because one or more lines are too long

4  dist/human.js.map (vendored)

File diff suppressed because one or more lines are too long

54  dist/human.json (vendored)
View File

@@ -9,7 +9,7 @@
       "imports": []
     },
     "src/tfjs/backend.js": {
-      "bytes": 1340,
+      "bytes": 1376,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -17,7 +17,7 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 6986,
+      "bytes": 7024,
       "imports": [
         {
           "path": "src/log.js"
@@ -44,7 +44,7 @@
       "imports": []
     },
     "src/face/facepipeline.js": {
-      "bytes": 14062,
+      "bytes": 14306,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -64,7 +64,7 @@
       ]
     },
     "src/face/facemesh.js": {
-      "bytes": 3054,
+      "bytes": 2991,
       "imports": [
         {
           "path": "src/log.js"
@@ -92,7 +92,7 @@
       ]
     },
     "src/age/age.js": {
-      "bytes": 2017,
+      "bytes": 2037,
       "imports": [
         {
           "path": "src/log.js"
@@ -106,7 +106,7 @@
       ]
     },
     "src/gender/gender.js": {
-      "bytes": 2886,
+      "bytes": 2906,
       "imports": [
         {
           "path": "src/log.js"
@@ -120,7 +120,7 @@
       ]
     },
     "src/emotion/emotion.js": {
-      "bytes": 3055,
+      "bytes": 3077,
       "imports": [
         {
           "path": "src/log.js"
@@ -134,7 +134,7 @@
       ]
     },
     "src/embedding/embedding.js": {
-      "bytes": 2041,
+      "bytes": 2063,
       "imports": [
         {
           "path": "src/log.js"
@@ -191,7 +191,7 @@
       ]
     },
     "src/body/decodePose.js": {
-      "bytes": 5216,
+      "bytes": 5368,
       "imports": [
         {
           "path": "src/body/keypoints.js"
@@ -205,7 +205,7 @@
       ]
     },
     "src/body/decodeMultiple.js": {
-      "bytes": 2303,
+      "bytes": 2373,
       "imports": [
         {
           "path": "src/body/buildParts.js"
@@ -227,7 +227,7 @@
       ]
     },
     "src/body/modelPoseNet.js": {
-      "bytes": 2395,
+      "bytes": 2519,
       "imports": [
         {
           "path": "src/log.js"
@@ -250,7 +250,7 @@
       ]
     },
     "src/body/posenet.js": {
-      "bytes": 614,
+      "bytes": 712,
       "imports": [
         {
           "path": "src/body/modelPoseNet.js"
@@ -264,7 +264,7 @@
       ]
     },
     "src/hand/box.js": {
-      "bytes": 3226,
+      "bytes": 2522,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -272,7 +272,7 @@
       ]
     },
     "src/hand/handdetector.js": {
-      "bytes": 4253,
+      "bytes": 3548,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -283,11 +283,11 @@
       ]
     },
     "src/hand/util.js": {
-      "bytes": 3030,
+      "bytes": 2346,
       "imports": []
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7951,
+      "bytes": 7246,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -308,7 +308,7 @@
       "imports": []
     },
     "src/hand/handpose.js": {
-      "bytes": 3250,
+      "bytes": 2578,
       "imports": [
         {
           "path": "src/log.js"
@@ -332,11 +332,11 @@
       "imports": []
     },
     "src/imagefx.js": {
-      "bytes": 19352,
+      "bytes": 19445,
       "imports": []
     },
     "src/image.js": {
-      "bytes": 5841,
+      "bytes": 5871,
       "imports": [
         {
           "path": "src/log.js"
@@ -362,7 +362,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 16727,
+      "bytes": 17375,
       "imports": [
         {
           "path": "src/log.js"
@@ -419,7 +419,7 @@
     "dist/human.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 1776778
+      "bytes": 1774483
     },
     "dist/human.js": {
       "imports": [],
@@ -441,7 +441,7 @@
           "bytesInOutput": 9513
         },
         "src/face/facemesh.js": {
-          "bytesInOutput": 2378
+          "bytesInOutput": 2317
         },
         "src/profile.js": {
           "bytesInOutput": 846
@@ -501,19 +501,19 @@
           "bytesInOutput": 127032
         },
         "src/hand/handpose.js": {
-          "bytesInOutput": 2022
+          "bytesInOutput": 2018
         },
         "src/gesture/gesture.js": {
           "bytesInOutput": 2463
         },
         "src/imagefx.js": {
-          "bytesInOutput": 13576
+          "bytesInOutput": 13628
         },
         "src/image.js": {
-          "bytesInOutput": 4040
+          "bytesInOutput": 3636
         },
         "src/human.js": {
-          "bytesInOutput": 11100
+          "bytesInOutput": 11231
         },
         "src/log.js": {
           "bytesInOutput": 266
@@ -540,7 +540,7 @@
           "bytesInOutput": 22
         }
       },
-      "bytes": 1839900
+      "bytes": 1838910
     }
   }
 }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

22  dist/human.node.js (vendored)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

58  dist/human.node.json (vendored)
View File

@@ -9,7 +9,7 @@
       "imports": []
     },
     "src/tfjs/backend.js": {
-      "bytes": 1340,
+      "bytes": 1376,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -17,7 +17,7 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 6986,
+      "bytes": 7024,
       "imports": [
         {
           "path": "src/log.js"
@@ -44,7 +44,7 @@
       "imports": []
     },
     "src/face/facepipeline.js": {
-      "bytes": 14062,
+      "bytes": 14306,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -64,7 +64,7 @@
       ]
     },
     "src/face/facemesh.js": {
-      "bytes": 3054,
+      "bytes": 2991,
       "imports": [
         {
           "path": "src/log.js"
@@ -92,7 +92,7 @@
       ]
     },
     "src/age/age.js": {
-      "bytes": 2017,
+      "bytes": 2037,
       "imports": [
         {
           "path": "src/log.js"
@@ -106,7 +106,7 @@
       ]
     },
     "src/gender/gender.js": {
-      "bytes": 2886,
+      "bytes": 2906,
       "imports": [
         {
           "path": "src/log.js"
@@ -120,7 +120,7 @@
       ]
     },
     "src/emotion/emotion.js": {
-      "bytes": 3055,
+      "bytes": 3077,
       "imports": [
         {
           "path": "src/log.js"
@@ -134,7 +134,7 @@
       ]
     },
     "src/embedding/embedding.js": {
-      "bytes": 2041,
+      "bytes": 2063,
       "imports": [
         {
           "path": "src/log.js"
@@ -191,7 +191,7 @@
       ]
     },
     "src/body/decodePose.js": {
-      "bytes": 5216,
+      "bytes": 5368,
       "imports": [
         {
           "path": "src/body/keypoints.js"
@@ -205,7 +205,7 @@
       ]
     },
     "src/body/decodeMultiple.js": {
-      "bytes": 2303,
+      "bytes": 2373,
       "imports": [
         {
           "path": "src/body/buildParts.js"
@@ -227,7 +227,7 @@
       ]
     },
     "src/body/modelPoseNet.js": {
-      "bytes": 2395,
+      "bytes": 2519,
       "imports": [
         {
           "path": "src/log.js"
@@ -250,7 +250,7 @@
       ]
     },
     "src/body/posenet.js": {
-      "bytes": 614,
+      "bytes": 712,
       "imports": [
         {
           "path": "src/body/modelPoseNet.js"
@@ -264,7 +264,7 @@
       ]
     },
     "src/hand/box.js": {
-      "bytes": 3226,
+      "bytes": 2522,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -272,7 +272,7 @@
       ]
     },
     "src/hand/handdetector.js": {
-      "bytes": 4253,
+      "bytes": 3548,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -283,11 +283,11 @@
       ]
     },
     "src/hand/util.js": {
-      "bytes": 3030,
+      "bytes": 2346,
       "imports": []
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7951,
+      "bytes": 7246,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -308,7 +308,7 @@
       "imports": []
     },
     "src/hand/handpose.js": {
-      "bytes": 3250,
+      "bytes": 2578,
       "imports": [
         {
           "path": "src/log.js"
@@ -332,11 +332,11 @@
       "imports": []
     },
     "src/imagefx.js": {
-      "bytes": 19352,
+      "bytes": 19445,
       "imports": []
     },
     "src/image.js": {
-      "bytes": 5841,
+      "bytes": 5871,
       "imports": [
         {
           "path": "src/log.js"
@@ -362,7 +362,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 16727,
+      "bytes": 17375,
       "imports": [
         {
           "path": "src/log.js"
@@ -419,7 +419,7 @@
     "dist/human.node-gpu.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 702698
+      "bytes": 700374
     },
     "dist/human.node-gpu.js": {
       "imports": [],
@@ -444,7 +444,7 @@
           "bytesInOutput": 9563
         },
         "src/face/facemesh.js": {
-          "bytesInOutput": 2421
+          "bytesInOutput": 2360
         },
         "src/profile.js": {
           "bytesInOutput": 846
@@ -495,28 +495,28 @@
           "bytesInOutput": 634
         },
         "src/hand/handdetector.js": {
-          "bytesInOutput": 2880
+          "bytesInOutput": 2876
         },
         "src/hand/handpipeline.js": {
-          "bytesInOutput": 4559
+          "bytesInOutput": 4555
         },
         "src/hand/anchors.js": {
           "bytesInOutput": 127034
         },
         "src/hand/handpose.js": {
-          "bytesInOutput": 2064
+          "bytesInOutput": 2060
         },
         "src/gesture/gesture.js": {
           "bytesInOutput": 2467
         },
         "src/imagefx.js": {
-          "bytesInOutput": 13558
+          "bytesInOutput": 13610
         },
         "src/image.js": {
-          "bytesInOutput": 4069
+          "bytesInOutput": 3665
         },
         "src/human.js": {
-          "bytesInOutput": 11126
+          "bytesInOutput": 11257
         },
         "src/log.js": {
           "bytesInOutput": 266
@@ -540,7 +540,7 @@
           "bytesInOutput": 20
         }
       },
-      "bytes": 310199
+      "bytes": 309201
     }
   }
 }

View File

@@ -46,6 +46,7 @@ async function predict(image, config) {
       const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
       ageT = profileAge.result.clone();
       profileAge.result.dispose();
+      // @ts-ignore
       profile.run('age', profileAge);
     }
     enhance.dispose();
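tf.profile wraps a callback and reports memory/kernel statistics alongside the callback's return value, which is what profile.run consumes above. A hedged sketch of the pattern, assuming @tensorflow/tfjs:

  const tf = require('@tensorflow/tfjs');

  async function profiledPredict() {
    const input = tf.tensor1d([1, 2, 3]);
    const info = await tf.profile(() => input.square()); // runs and instruments the callback
    console.log('new bytes:', info.newBytes, 'new tensors:', info.newTensors);
    if (info.result) info.result.dispose(); // result holds the callback's output tensor, as used above
    input.dispose();
  }
  profiledPredict();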

View File

@@ -7,6 +7,7 @@ const kLocalMaximumRadius = 1;
 function withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, { x, y }, keypointId) {
   return poses.some(({ keypoints }) => {
     const correspondingKeypoint = keypoints[keypointId].position;
+    // @ts-ignore
     return vectors.squaredDistance(y, x, correspondingKeypoint.y, correspondingKeypoint.x) <= squaredNmsRadius;
   });
 }
@@ -21,6 +22,7 @@ function getInstanceScore(existingPoses, squaredNmsRadius, instanceKeypoints) {
 function decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, config) {
   const poses = [];
+  // @ts-ignore
   const queue = buildParts.buildPartWithScoreQueue(config.body.scoreThreshold, kLocalMaximumRadius, scoresBuffer);
   const squaredNmsRadius = config.body.nmsRadius ^ 2;
   // Generate at most maxDetections object instances per image in decreasing root part score order.
@@ -28,9 +30,11 @@ function decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer
     // The top element in the queue is the next root candidate.
     const root = queue.dequeue();
     // Part-based non-maximum suppression: We reject a root candidate if it is within a disk of `nmsRadius` pixels from the corresponding part of a previously detected instance.
+    // @ts-ignore
     const rootImageCoords = vectors.getImageCoords(root.part, config.body.outputStride, offsetsBuffer);
     if (withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, rootImageCoords, root.part.id)) continue;
     // Else start a new detection instance at the position of the root.
+    // @ts-ignore
     const keypoints = decodePose.decodePose(root, scoresBuffer, offsetsBuffer, config.body.outputStride, displacementsFwdBuffer, displacementsBwdBuffer);
     const score = getInstanceScore(poses, squaredNmsRadius, keypoints);
     if (score > config.body.scoreThreshold) poses.push({ keypoints, score });

View File

@@ -14,7 +14,9 @@ function getDisplacement(edgeId, point, displacements) {
 }
 function getStridedIndexNearPoint(point, outputStride, height, width) {
   return {
+    // @ts-ignore
     y: vectors.clamp(Math.round(point.y / outputStride), 0, height - 1),
+    // @ts-ignore
     x: vectors.clamp(Math.round(point.x / outputStride), 0, width - 1),
   };
 }
@@ -24,11 +26,14 @@ function traverseToTargetKeypoint(edgeId, sourceKeypoint, targetKeypointId, scor
   // Nearest neighbor interpolation for the source->target displacements.
   const sourceKeypointIndices = getStridedIndexNearPoint(sourceKeypoint.position, outputStride, height, width);
   const displacement = getDisplacement(edgeId, sourceKeypointIndices, displacements);
+  // @ts-ignore
   const displacedPoint = vectors.addVectors(sourceKeypoint.position, displacement);
   let targetKeypoint = displacedPoint;
   for (let i = 0; i < offsetRefineStep; i++) {
     const targetKeypointIndices = getStridedIndexNearPoint(targetKeypoint, outputStride, height, width);
+    // @ts-ignore
     const offsetPoint = vectors.getOffsetPoint(targetKeypointIndices.y, targetKeypointIndices.x, targetKeypointId, offsets);
+    // @ts-ignore
     targetKeypoint = vectors.addVectors({
       x: targetKeypointIndices.x * outputStride,
       y: targetKeypointIndices.y * outputStride,
@@ -45,6 +50,7 @@ function decodePose(root, scores, offsets, outputStride, displacementsFwd, displ
   const instanceKeypoints = new Array(numParts);
   // Start a new detection instance at the position of the root.
   const { part: rootPart, score: rootScore } = root;
+  // @ts-ignore
   const rootPoint = vectors.getImageCoords(rootPart, outputStride, offsets);
   instanceKeypoints[rootPart.id] = {
     score: rootScore,
@@ -73,13 +79,16 @@ exports.decodePose = decodePose;
 async function decodeSinglePose(heatmapScores, offsets, config) {
   let totalScore = 0.0;
+  // @ts-ignore
   const heatmapValues = decoders.argmax2d(heatmapScores);
   const allTensorBuffers = await Promise.all([heatmapScores.buffer(), offsets.buffer(), heatmapValues.buffer()]);
   const scoresBuffer = allTensorBuffers[0];
   const offsetsBuffer = allTensorBuffers[1];
   const heatmapValuesBuffer = allTensorBuffers[2];
+  // @ts-ignore
   const offsetPoints = decoders.getOffsetPoints(heatmapValuesBuffer, config.body.outputStride, offsetsBuffer);
   const offsetPointsBuffer = await offsetPoints.buffer();
+  // @ts-ignore
   const keypointConfidence = Array.from(decoders.getPointsConfidence(scoresBuffer, heatmapValuesBuffer));
   const instanceKeypoints = keypointConfidence.map((score, i) => {
     totalScore += score;
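getStridedIndexNearPoint maps an image-space point back to the nearest cell of the stride-reduced output grid, clamped to the grid bounds. A quick illustration with the clamp helper written inline:

  const clamp = (v, min, max) => Math.min(Math.max(v, min), max);

  function gridIndexNearPoint(point, outputStride, height, width) {
    return {
      y: clamp(Math.round(point.y / outputStride), 0, height - 1),
      x: clamp(Math.round(point.x / outputStride), 0, width - 1),
    };
  }

  // a 257x257 input with outputStride 16 produces a 17x17 grid:
  console.log(gridIndexNearPoint({ x: 200, y: 30 }, 16, 17, 17)); // { y: 2, x: 13 }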

View File

@@ -9,12 +9,15 @@ async function estimateMultiple(input, res, config) {
   return new Promise(async (resolve) => {
     const height = input.shape[1];
     const width = input.shape[2];
+    // @ts-ignore
     const allTensorBuffers = await util.toTensorBuffers3D([res.heatmapScores, res.offsets, res.displacementFwd, res.displacementBwd]);
     const scoresBuffer = allTensorBuffers[0];
     const offsetsBuffer = allTensorBuffers[1];
     const displacementsFwdBuffer = allTensorBuffers[2];
     const displacementsBwdBuffer = allTensorBuffers[3];
+    // @ts-ignore
     const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, config);
+    // @ts-ignore
     const scaled = util.scaleAndFlipPoses(poses, [height, width], [config.body.inputSize, config.body.inputSize]);
     resolve(scaled);
   });
@@ -24,8 +27,10 @@ async function estimateSingle(input, res, config) {
   return new Promise(async (resolve) => {
     const height = input.shape[1];
     const width = input.shape[2];
+    // @ts-ignore
     const pose = await decodePose.decodeSinglePose(res.heatmapScores, res.offsets, config);
     const poses = [pose];
+    // @ts-ignore
     const scaled = util.scaleAndFlipPoses(poses, [height, width], [config.body.inputSize, config.body.inputSize]);
     resolve(scaled);
   });
@@ -37,6 +42,7 @@ class PoseNet {
   }
   async estimatePoses(input, config) {
+    // @ts-ignore
     const resized = util.resizeTo(input, [config.body.inputSize, config.body.inputSize]);
     const res = this.baseModel.predict(resized, config);
@@ -59,6 +65,7 @@ exports.PoseNet = PoseNet;
 async function load(config) {
   const model = await tf.loadGraphModel(config.body.modelPath);
+  // @ts-ignore
   const mobilenet = new modelBase.BaseModel(model);
   log(`load model: ${config.body.modelPath.match(/\/(.*)\./)[1]}`);
   return new PoseNet(mobilenet);

View File

@@ -2,15 +2,22 @@ import * as modelPoseNet from './modelPoseNet';
 import * as keypoints from './keypoints';
 import * as util from './util';
+// @ts-ignore
 exports.load = modelPoseNet.load;
+// @ts-ignore
 exports.PoseNet = modelPoseNet.PoseNet;
 exports.partChannels = keypoints.partChannels;
 exports.partIds = keypoints.partIds;
 exports.partNames = keypoints.partNames;
 exports.poseChain = keypoints.poseChain;
+// @ts-ignore
 exports.getAdjacentKeyPoints = util.getAdjacentKeyPoints;
+// @ts-ignore
 exports.getBoundingBox = util.getBoundingBox;
+// @ts-ignore
 exports.getBoundingBoxPoints = util.getBoundingBoxPoints;
+// @ts-ignore
 exports.scaleAndFlipPoses = util.scaleAndFlipPoses;
+// @ts-ignore
 exports.scalePose = util.scalePose;

View File

@@ -39,6 +39,7 @@ async function predict(image, config) {
       const profileData = await tf.profile(() => models.embedding.predict({ img_inputs: resize }));
       data = [...profileData.result.dataSync()];
       profileData.result.dispose();
+      // @ts-ignore
       profile.run('emotion', profileData);
     }
   }

View File

@@ -65,6 +65,7 @@ async function predict(image, config) {
       const profileData = await tf.profile(() => models.emotion.predict(normalize));
       data = profileData.result.dataSync();
       profileData.result.dispose();
+      // @ts-ignore
       profile.run('emotion', profileData);
     }
     for (let i = 0; i < data.length; i++) {

View File

@@ -134,11 +134,13 @@ class BlazeFaceModel {
   }
   async estimateFaces(input) {
+    // @ts-ignore
     const { boxes, scaleFactor } = await this.getBoundingBoxes(input);
     const faces = [];
     for (const face of boxes) {
       const landmarkData = face.landmarks.arraySync();
       const scaledBox = scaleBoxFromPrediction(face, scaleFactor);
+      // @ts-ignore
       const boxData = scaleBox.arraySync();
       const probabilityData = face.probability.arraySync();
       const anchor = face.anchor;

View File

@@ -6,6 +6,7 @@ import * as coords from './coords.js';
 class MediaPipeFaceMesh {
   constructor(blazeFace, blazeMeshModel, irisModel, config) {
+    // @ts-ignore
     this.facePipeline = new facepipeline.Pipeline(blazeFace, blazeMeshModel, irisModel, config);
     this.config = config;
   }
@@ -19,21 +20,17 @@ class MediaPipeFaceMesh {
       const meshRaw = prediction.rawCoords;
       const annotations = {};
       if (mesh && mesh.length > 0) {
-        for (let key = 0; key < coords.MESH_ANNOTATIONS.length; key++) {
-          if (config.face.iris.enabled || key.includes('Iris') === false) {
-            annotations[key] = coords.MESH_ANNOTATIONS[key].map((index) => mesh[index]);
-          }
+        for (const key of Object.keys(coords.MESH_ANNOTATIONS)) {
+          annotations[key] = coords.MESH_ANNOTATIONS[key].map((index) => mesh[index]);
         }
       }
       const boxRaw = (config.face.mesh.returnRawData && prediction.box) ? { topLeft: prediction.box.startPoint, bottomRight: prediction.box.endPoint } : null;
       const box = prediction.box ? [
         Math.max(0, prediction.box.startPoint[0]),
         Math.max(0, prediction.box.startPoint[1]),
         Math.min(input.shape[2], prediction.box.endPoint[0]) - prediction.box.startPoint[0],
         Math.min(input.shape[1], prediction.box.endPoint[1]) - prediction.box.startPoint[1],
       ] : 0;
       results.push({
         confidence: prediction.confidence || 0,
         box,
@@ -53,6 +50,7 @@ class MediaPipeFaceMesh {
 let faceModels = [null, null, null];
 async function load(config) {
   faceModels = await Promise.all([
+    // @ts-ignore
     (!faceModels[0] && config.face.enabled) ? blazeface.load(config) : null,
     (!faceModels[1] && config.face.mesh.enabled) ? tf.loadGraphModel(config.face.mesh.modelPath, { fromTFHub: config.face.mesh.modelPath.includes('tfhub.dev') }) : null,
     (!faceModels[2] && config.face.iris.enabled) ? tf.loadGraphModel(config.face.iris.modelPath, { fromTFHub: config.face.iris.modelPath.includes('tfhub.dev') }) : null,
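This hunk is the "fix face annotations" half of the commit: MESH_ANNOTATIONS is a plain object keyed by region name, so the old for (let key = 0; ...) loop compared numeric indices against an object that has no length, and the annotations map stayed empty. Iterating Object.keys restores it; a toy illustration with a hypothetical two-region table:

  const MESH_ANNOTATIONS = { silhouette: [10, 338], leftEyeIris: [468, 469] }; // toy table
  const mesh = Array.from({ length: 478 }, (_, i) => [i, i, i]); // fake landmark list

  const annotations = {};
  for (const key of Object.keys(MESH_ANNOTATIONS)) {
    annotations[key] = MESH_ANNOTATIONS[key].map((index) => mesh[index]);
  }
  console.log(Object.keys(annotations)); // ['silhouette', 'leftEyeIris']
  // the old loop tested key < MESH_ANNOTATIONS.length, which is undefined, so it never ran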

View File

@@ -56,6 +56,7 @@ class Pipeline {
   }
   transformRawCoords(rawCoords, box, angle, rotationMatrix) {
+    // @ts-ignore
     const boxSize = bounding.getBoxSize({ startPoint: box.startPoint, endPoint: box.endPoint });
     const scaleFactor = [boxSize[0] / this.meshWidth, boxSize[1] / this.meshHeight];
     const coordsScaled = rawCoords.map((coord) => ([
@@ -65,6 +66,7 @@ class Pipeline {
     const coordsRotationMatrix = (angle !== 0) ? util.buildRotationMatrix(angle, [0, 0]) : util.IDENTITY_MATRIX;
     const coordsRotated = (angle !== 0) ? coordsScaled.map((coord) => ([...util.rotatePoint(coord, coordsRotationMatrix), coord[2]])) : coordsScaled;
     const inverseRotationMatrix = (angle !== 0) ? util.invertTransformMatrix(rotationMatrix) : util.IDENTITY_MATRIX;
+    // @ts-ignore
     const boxCenter = [...bounding.getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint }), 1];
     return coordsRotated.map((coord) => ([
       coord[0] + util.dot(boxCenter, inverseRotationMatrix[0]),
@@ -81,7 +83,9 @@ class Pipeline {
   // Returns a box describing a cropped region around the eye fit for passing to the iris model.
   getEyeBox(rawCoords, face, eyeInnerCornerIndex, eyeOuterCornerIndex, flip = false) {
+    // @ts-ignore
     const box = bounding.squarifyBox(bounding.enlargeBox(this.calculateLandmarksBoundingBox([rawCoords[eyeInnerCornerIndex], rawCoords[eyeOuterCornerIndex]]), this.irisEnlarge));
+    // @ts-ignore
     const boxSize = bounding.getBoxSize(box);
     let crop = tf.image.cropAndResize(face, [[
       box.startPoint[1] / this.meshHeight,
@@ -155,8 +159,11 @@ class Pipeline {
       return null;
     }
     for (let i = 0; i < this.storedBoxes.length; i++) {
+      // @ts-ignore
       const scaledBox = bounding.scaleBoxCoordinates({ startPoint: this.storedBoxes[i].startPoint, endPoint: this.storedBoxes[i].endPoint }, detector.scaleFactor);
+      // @ts-ignore
       const enlargedBox = bounding.enlargeBox(scaledBox);
+      // @ts-ignore
       const squarifiedBox = bounding.squarifyBox(enlargedBox);
       const landmarks = this.storedBoxes[i].landmarks.arraySync();
       const confidence = this.storedBoxes[i].confidence;
@@ -181,14 +188,17 @@ class Pipeline {
     if (config.face.detector.rotation) {
       const [indexOfMouth, indexOfForehead] = (box.landmarks.length >= LANDMARKS_COUNT) ? MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES : BLAZEFACE_KEYPOINTS_LINE_OF_SYMMETRY_INDICES;
       angle = util.computeRotation(box.landmarks[indexOfMouth], box.landmarks[indexOfForehead]);
+      // @ts-ignore
       const faceCenter = bounding.getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint });
       const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
       const rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
       rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);
+      // @ts-ignore
       face = bounding.cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);
     } else {
       rotationMatrix = util.IDENTITY_MATRIX;
       const cloned = input.clone();
+      // @ts-ignore
       face = bounding.cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, cloned, [this.meshHeight, this.meshWidth]).div(255);
     }
@@ -235,7 +245,9 @@ class Pipeline {
     }
     const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
+    // @ts-ignore
     const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));
+    // @ts-ignore
     const squarifiedLandmarksBox = bounding.squarifyBox(landmarksBox);
     const transformedCoords = tf.tensor2d(transformedCoordsData);
     const prediction = {

View File

@@ -53,6 +53,7 @@ async function predict(image, config) {
       const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};
       genderT = profileGender.result.clone();
       profileGender.result.dispose();
+      // @ts-ignore
       profile.run('gender', profileGender);
     }
     enhance.dispose();

View File

@@ -1,19 +1,3 @@
-/**
- * @license
- * Copyright 2020 Google LLC. All Rights Reserved.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =============================================================================
- */
 import * as tf from '../../dist/tfjs.esm.js';
 function getBoxSize(box) {

View File

@@ -1,20 +1,3 @@
-/**
- * @license
- * Copyright 2020 Google LLC. All Rights Reserved.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =============================================================================
- */
-
 import * as tf from '../../dist/tfjs.esm.js';
 import * as box from './box';

View File

@@ -1,20 +1,3 @@
-/**
- * @license
- * Copyright 2020 Google LLC. All Rights Reserved.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =============================================================================
- */
-
 import * as tf from '../../dist/tfjs.esm.js';
 import * as box from './box';
 import * as util from './util';

View File

@@ -1,19 +1,3 @@
-/**
- * @license
- * Copyright 2020 Google LLC. All Rights Reserved.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =============================================================================
- */
 // https://storage.googleapis.com/tfjs-models/demos/handpose/index.html
 import { log } from '../log.js';
@@ -74,7 +58,9 @@ async function load(config) {
     config.hand.enabled ? tf.loadGraphModel(config.hand.detector.modelPath, { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
     config.hand.landmarks ? tf.loadGraphModel(config.hand.skeleton.modelPath, { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) : null,
   ]);
+  // @ts-ignore
   const handDetector = new handdetector.HandDetector(handDetectorModel, config.hand.inputSize, anchors.anchors);
+  // @ts-ignore
   const handPipeline = new handpipeline.HandPipeline(handDetector, handPoseModel, config.hand.inputSize);
   const handPose = new HandPose(handPipeline);
   if (config.hand.enabled) log(`load model: ${config.hand.detector.modelPath.match(/\/(.*)\./)[1]}`);

View File

@@ -1,19 +1,3 @@
-/**
- * @license
- * Copyright 2020 Google LLC. All Rights Reserved.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =============================================================================
- */
 function normalizeRadians(angle) {
   return angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));
 }
@@ -42,6 +26,7 @@ function multiplyTransformMatrices(mat1, mat2) {
   for (let row = 0; row < size; row++) {
     product.push([]);
     for (let col = 0; col < size; col++) {
+      // @ts-ignore
       product[row].push(dot(mat1[row], getColumnFrom2DArr(mat2, col)));
     }
   }
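multiplyTransformMatrices builds each entry of the product as the dot product of a row of the first matrix with a column of the second. A standalone sketch with both helpers written out:

  const dot = (a, b) => a.reduce((sum, v, i) => sum + v * b[i], 0);
  const getColumn = (matrix, col) => matrix.map((row) => row[col]);

  function multiplyMatrices(mat1, mat2) {
    const product = [];
    for (let row = 0; row < mat1.length; row++) {
      product.push([]);
      for (let col = 0; col < mat2[0].length; col++) {
        product[row].push(dot(mat1[row], getColumn(mat2, col)));
      }
    }
    return product;
  }

  // rotating by 90 degrees twice equals rotating by 180 degrees:
  const rot90 = [[0, -1], [1, 0]];
  console.log(multiplyMatrices(rot90, rot90)); // [[-1, 0], [0, -1]]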

View File

@@ -18,6 +18,7 @@ import * as app from '../package.json';
 // helper function: gets elapsed time on both browser and nodejs
 const now = () => {
   if (typeof performance !== 'undefined') return performance.now();
+  // @ts-ignore
   return parseInt(Number(process.hrtime.bigint()) / 1000 / 1000);
 };
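The helper prefers performance.now() in browsers and falls back to process.hrtime.bigint() (nanoseconds) under Node, converting to milliseconds; the @ts-ignore covers process being untyped in a browser-targeted build. A usage sketch:

  // millisecond timer usable in both browser and Node, same idea as the hunk above
  const now = () => {
    if (typeof performance !== 'undefined') return performance.now();
    return Number(process.hrtime.bigint()) / 1e6; // ns -> ms
  };

  const t0 = now();
  for (let i = 0; i < 1e6; i++); // stand-in for real work
  console.log(`elapsed: ${(now() - t0).toFixed(2)} ms`);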
@ -72,6 +73,7 @@ class Human {
} }
profile() { profile() {
// @ts-ignore
if (this.config.profile) return profile.data; if (this.config.profile) return profile.data;
return {}; return {};
} }
@ -102,6 +104,7 @@ class Human {
} }
simmilarity(embedding1, embedding2) { simmilarity(embedding1, embedding2) {
// @ts-ignore
if (this.config.face.embedding.enabled) return embedding.simmilarity(embedding1, embedding2); if (this.config.face.embedding.enabled) return embedding.simmilarity(embedding1, embedding2);
return 0; return 0;
} }
@ -132,21 +135,35 @@ class Human {
this.models.posenet, this.models.posenet,
this.models.handpose, this.models.handpose,
] = await Promise.all([ ] = await Promise.all([
// @ts-ignore
this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config) : null), this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config) : null),
// @ts-ignore
this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null), this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
// @ts-ignore
this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null), this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
// @ts-ignore
this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null), this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
// @ts-ignore
this.models.embedding || ((this.config.face.enabled && this.config.face.embedding.enabled) ? embedding.load(this.config) : null), this.models.embedding || ((this.config.face.enabled && this.config.face.embedding.enabled) ? embedding.load(this.config) : null),
// @ts-ignore
this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null), this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
// @ts-ignore
this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null), this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
]); ]);
} else { } else {
// @ts-ignore
if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config); if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config);
// @ts-ignore
if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config); if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
// @ts-ignore
if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config); if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
// @ts-ignore
if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config); if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
// @ts-ignore
if (this.config.face.enabled && this.config.face.embedding.enabled && !this.models.embedding) this.models.embedding = await embedding.load(this.config); if (this.config.face.enabled && this.config.face.embedding.enabled && !this.models.embedding) this.models.embedding = await embedding.load(this.config);
// @ts-ignore
if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config); if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);
// @ts-ignore
if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config); if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config);
} }
const current = Math.trunc(now() - timeStamp); const current = Math.trunc(now() - timeStamp);
@@ -213,7 +230,8 @@ class Human {
    const faceRes = [];
    this.state = 'run:face';
    timeStamp = now();
-   const faces = await this.models.facemesh.estimateFaces(input, this.config);
+   // @ts-ignore
+   const faces = await this.models.facemesh?.estimateFaces(input, this.config);
    this.perf.face = Math.trunc(now() - timeStamp);
    for (const face of faces) {
      this.analyze('Get Face');
@@ -227,10 +245,12 @@ class Human {
      // run age, inherits face from blazeface
      this.analyze('Start Age:');
      if (this.config.async) {
+       // @ts-ignore
        ageRes = this.config.face.age.enabled ? age.predict(face.image, this.config) : {};
      } else {
        this.state = 'run:age';
        timeStamp = now();
+       // @ts-ignore
        ageRes = this.config.face.age.enabled ? await age.predict(face.image, this.config) : {};
        this.perf.age = Math.trunc(now() - timeStamp);
      }
@@ -238,10 +258,12 @@ class Human {
      // run gender, inherits face from blazeface
      this.analyze('Start Gender:');
      if (this.config.async) {
+       // @ts-ignore
        genderRes = this.config.face.gender.enabled ? gender.predict(face.image, this.config) : {};
      } else {
        this.state = 'run:gender';
        timeStamp = now();
+       // @ts-ignore
        genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};
        this.perf.gender = Math.trunc(now() - timeStamp);
      }
@@ -249,10 +271,12 @@ class Human {
      // run emotion, inherits face from blazeface
      this.analyze('Start Emotion:');
      if (this.config.async) {
+       // @ts-ignore
        emotionRes = this.config.face.emotion.enabled ? emotion.predict(face.image, this.config) : {};
      } else {
        this.state = 'run:emotion';
        timeStamp = now();
+       // @ts-ignore
        emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
        this.perf.emotion = Math.trunc(now() - timeStamp);
      }
@@ -261,10 +285,12 @@ class Human {
      // run emotion, inherits face from blazeface
      this.analyze('Start Embedding:');
      if (this.config.async) {
+       // @ts-ignore
        embeddingRes = this.config.face.embedding.enabled ? embedding.predict(face.image, this.config) : {};
      } else {
        this.state = 'run:embedding';
        timeStamp = now();
+       // @ts-ignore
        embeddingRes = this.config.face.embedding.enabled ? await embedding.predict(face.image, this.config) : {};
        this.perf.embedding = Math.trunc(now() - timeStamp);
      }
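Note the pattern repeated in each block above: under config.async the predict call is not awaited, so ageRes, genderRes, emotionRes and embeddingRes hold pending promises, and only the synchronous branch records per-model timings. A hedged sketch of the follow-up step such code typically needs (variable names reused from the hunks above; the exact resolution point is outside this diff):

// later in the same function, still under config.async:
[ageRes, genderRes, emotionRes, embeddingRes] = await Promise.all([ageRes, genderRes, emotionRes, embeddingRes]);

Promise.all also accepts plain values, so the same line works when the synchronous branch already produced concrete results.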
@@ -291,7 +317,6 @@ class Human {
        confidence: face.confidence,
        box: face.box,
        mesh: face.mesh,
-       // AT: boxRaw, meshRaw
        boxRaw: face.boxRaw,
        meshRaw: face.meshRaw,
        annotations: face.annotations,
@@ -317,6 +342,7 @@ class Human {
  async image(input, userConfig = {}) {
    this.state = 'image';
    this.config = mergeDeep(this.config, userConfig);
+   // @ts-ignore
    const process = image.process(input, this.config);
    process.tensor.dispose();
    return process.canvas;
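The image() helper above runs only the input pipeline (filters plus resizing), disposes the intermediate tensor and returns the processed canvas. A hedged usage sketch, assuming a browser environment, an already constructed Human instance named human, and a video element named video:

const canvas = await human.image(video, { filter: { brightness: 0.1 } });
document.body.appendChild(canvas); // works when the returned canvas is a DOM canvas element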
@@ -356,6 +382,7 @@ class Human {
    this.analyze('Start Scope:');
    timeStamp = now();
+   // @ts-ignore
    const process = image.process(input, this.config);
    if (!process || !process.tensor) {
      log('could not convert input to tensor');
@@ -379,12 +406,14 @@ class Human {
    // run posenet
    this.analyze('Start Body:');
    if (this.config.async) {
-     poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config) : [];
+     // @ts-ignore
+     poseRes = this.config.body.enabled ? this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
      if (this.perf.body) delete this.perf.body;
    } else {
      this.state = 'run:body';
      timeStamp = now();
-     poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config) : [];
+     // @ts-ignore
+     poseRes = this.config.body.enabled ? await this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
      this.perf.body = Math.trunc(now() - timeStamp);
    }
    this.analyze('End Body:');
@@ -392,12 +421,14 @@ class Human {
    // run handpose
    this.analyze('Start Hand:');
    if (this.config.async) {
-     handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config) : [];
+     // @ts-ignore
+     handRes = this.config.hand.enabled ? this.models.handpose?.estimateHands(process.tensor, this.config) : [];
      if (this.perf.hand) delete this.perf.hand;
    } else {
      this.state = 'run:hand';
      timeStamp = now();
-     handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(process.tensor, this.config) : [];
+     // @ts-ignore
+     handRes = this.config.hand.enabled ? await this.models.handpose?.estimateHands(process.tensor, this.config) : [];
      this.perf.hand = Math.trunc(now() - timeStamp);
    }
    // this.analyze('End Hand:');
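The optional-chaining changes in the last two hunks guard against a model that was never loaded: if the reference is undefined, the whole call expression evaluates to undefined instead of throwing. A minimal standalone sketch of the behavior:

const models = {}; // assume posenet failed to load or was disabled
const tensor = null;
const config = {};
// without ?. the next line would throw: TypeError: models.posenet is undefined
const poses = models.posenet?.estimatePoses(tensor, config) ?? [];
console.log(poses); // []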

View File

@@ -41,6 +41,7 @@ function process(input, config) {
      if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;
      this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
    }
+   if (!this.fx) return inCanvas;
    this.fx.reset();
    this.fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled
    if (config.filter.contrast !== 0) this.fx.addFilter('contrast', config.filter.contrast);
@@ -58,8 +59,8 @@ function process(input, config) {
    if (config.filter.pixelate !== 0) this.fx.addFilter('pixelate', config.filter.pixelate);
    this.fx.apply(inCanvas);
    // read pixel data
-   // const gl = outCanvas.getContext('webgl');
-   const gl = false;
+   /*
+   const gl = outCanvas.getContext('webgl');
    if (gl) {
      const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);
      const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);
@@ -77,6 +78,7 @@ function process(input, config) {
      }
      outCanvas.data = pixBuffer;
    }
+   */
  } else {
    outCanvas = inCanvas;
  }
@@ -93,8 +95,8 @@ function process(input, config) {
      tempCanvas.width = targetWidth;
      tempCanvas.height = targetHeight;
      const tempCtx = tempCanvas.getContext('2d');
-     tempCtx.drawImage(outCanvas, 0, 0);
-     const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
+     tempCtx?.drawImage(outCanvas, 0, 0);
+     const data = tempCtx?.getImageData(0, 0, targetWidth, targetHeight);
      pixels = tf.browser.fromPixels(data);
    }
    const casted = pixels.toFloat();
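getContext('2d') is typed as possibly null in the DOM typings, hence the tempCtx?. guards above; the resulting ImageData is then handed to tf.browser.fromPixels. A hedged sketch of the same round trip using an explicit null check instead (tf is assumed to be the imported TensorFlow.js namespace; canvas and source stand in for valid canvas elements):

const ctx = canvas.getContext('2d');
if (ctx) {
  ctx.drawImage(source, 0, 0);
  const data = ctx.getImageData(0, 0, canvas.width, canvas.height);
  const pixels = tf.browser.fromPixels(data); // int32 tensor, shape [height, width, 3]
  const casted = pixels.toFloat();            // float32, ready for model input
  pixels.dispose();
}

An explicit if (ctx) check narrows the type just as well as ?. and avoids silently passing undefined ImageData downstream.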

View File

@@ -20,6 +20,7 @@ const WebGLProgram = function (gl, vertexSource, fragmentSource) {
    gl.compileShader(shader);
    if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
+     // @ts-ignore
      throw new Error('Filter: GL compile failed', gl.getShaderInfoLog(shader));
    }
    return shader;
@@ -37,6 +38,7 @@ const WebGLProgram = function (gl, vertexSource, fragmentSource) {
    gl.linkProgram(this.id);
    if (!gl.getProgramParameter(this.id, gl.LINK_STATUS)) {
+     // @ts-ignore
      throw new Error('Filter: GL link failed', gl.getProgramInfoLog(this.id));
    }
@@ -149,9 +151,8 @@ const WebGLImageFilter = function (params) {
  };
  const _getTempFramebuffer = function (index) {
-   _tempFramebuffers[index] = _tempFramebuffers[index]
-     || _createFramebufferTexture(_width, _height);
+   // @ts-ignore
+   _tempFramebuffers[index] = _tempFramebuffers[index] || _createFramebufferTexture(_width, _height);
    return _tempFramebuffers[index];
  };
@@ -190,7 +191,8 @@ const WebGLImageFilter = function (params) {
      source = _sourceTexture;
    } else {
      // All following draw calls use the temp buffer last drawn to
-     source = _getTempFramebuffer(_currentFramebufferIndex).texture;
+     // @ts-ignore
+     source = _getTempFramebuffer(_currentFramebufferIndex)?.texture;
    }
    _drawCount++;
@@ -203,7 +205,8 @@ const WebGLImageFilter = function (params) {
    } else {
      // Intermediate draw call - get a temp buffer to draw to
      _currentFramebufferIndex = (_currentFramebufferIndex + 1) % 2;
-     target = _getTempFramebuffer(_currentFramebufferIndex).fbo;
+     // @ts-ignore
+     target = _getTempFramebuffer(_currentFramebufferIndex)?.fbo;
    }
    // Bind the source and target and draw the two triangles
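The two hunks above are the two halves of the filter chain's ping-pong: each intermediate draw flips _currentFramebufferIndex between 0 and 1, writing into one temp framebuffer while sampling the texture the previous pass wrote. A minimal sketch of the index logic, with hypothetical placeholder buffers:

const buffers = [{ fbo: 'A' }, { fbo: 'B' }]; // two preallocated framebuffer/texture pairs
let index = -1;
function nextTarget() {
  index = (index + 1) % 2; // flip 0 <-> 1
  return buffers[index];   // write target; the other buffer holds the previous pass's output
}

Two buffers suffice because any pass only ever reads the immediately preceding pass's output.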

View File

@@ -20,7 +20,9 @@ export const config = {
export function register() {
  if (!tf.findBackend(config.name)) {
+   // @ts-ignore
    config.canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(config.width, config.height) : document.createElement('canvas');
+   // @ts-ignore
    const gl = config.canvas.getContext('webgl2', config.webGLattr);
    tf.setWebGLContext(2, gl);
    const ctx = new tf.GPGPUContext(gl);
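Once register() has created the (offscreen) canvas, obtained a webgl2 context and wired it into tf, the custom backend can be selected like any built-in one. A hedged usage sketch, assuming register() also completes the backend registration outside this hunk and that config.name holds the backend name:

register();                       // set up canvas, GL context and backend registration
await tf.setBackend(config.name); // switch TensorFlow.js to the custom backend
await tf.ready();                 // wait until the backend is fully initialized

tf.setBackend and tf.ready are standard TensorFlow.js calls; only the backend name comes from this module's config.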

tsconfig.json Normal file
View File

@@ -0,0 +1,25 @@
{
"compilerOptions": {
"module": "es2020",
"target": "es2018",
"moduleResolution": "node",
"lib": ["es2018", "dom"],
"typeRoots": ["node_modules/@types"],
"outDir": "types",
"declaration": true,
"emitDeclarationOnly": true,
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"importHelpers": true,
"noImplicitAny": false,
"preserveConstEnums": true,
"removeComments": false,
"resolveJsonModule": true,
"skipLibCheck": true,
"sourceMap": false,
"strictNullChecks": false,
"allowJs": true
},
"formatCodeOptions": { "indentSize": 2, "tabSize": 2 },
"include": ["src/*", "demo/*"],
}
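With declaration plus emitDeclarationOnly set, a plain npx tsc -p tsconfig.json pass emits only .d.ts files into types/ without transpiling any JavaScript, and allowJs lets the existing .js sources under src/ and demo/ participate in that type check.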

wiki

@@ -1 +1 @@
-Subproject commit 699af2235b315ef24766839ddc49a198f7cc21c3
+Subproject commit 43425df86424e9b5f4bec3510f3565963255d4a9