minor compatibility fixes

pull/50/head
Vladimir Mandic 2020-11-26 10:37:04 -05:00
parent bef5619d5d
commit 7d0e63981c
27 changed files with 115 additions and 117 deletions

demo/browser.js

@@ -134,11 +134,11 @@ async function drawResults(input) {
   const avgDetect = Math.trunc(10 * ui.detectFPS.reduce((a, b) => a + b, 0) / ui.detectFPS.length) / 10;
   const avgDraw = Math.trunc(10 * ui.drawFPS.reduce((a, b) => a + b, 0) / ui.drawFPS.length) / 10;
   const warning = (ui.detectFPS.length > 5) && (avgDetect < 5) ? '<font color="lightcoral">warning: your performance is low: try switching to higher performance backend, lowering resolution or disabling some models</font>' : '';
-  document.getElementById('log').innerText = `
-    video: ${ui.camera.name} | facing: ${ui.camera.facing} | screen: ${window.innerWidth} x ${window.innerHeight} camera: ${ui.camera.width} x ${ui.camera.height} ${processing}
-    backend: ${human.tf.getBackend()} | ${memory}
-    performance: ${str(result.performance)}ms FPS process:${avgDetect} refresh:${avgDraw}
-    ${warning}
+  document.getElementById('log').innerHTML = `
+    video: ${ui.camera.name} | facing: ${ui.camera.facing} | screen: ${window.innerWidth} x ${window.innerHeight} camera: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
+    backend: ${human.tf.getBackend()} | ${memory}<br>
+    performance: ${str(result.performance)}ms FPS process:${avgDetect} refresh:${avgDraw}<br>
+    ${warning}<br>
   `;
   ui.framesDraw++;
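Note: the <font> and <br> markup in the log template only renders when it is assigned via innerHTML; with innerText the browser shows the tags as literal text. A minimal sketch of the difference (element id and strings are illustrative only):

const el = document.getElementById('log');               // hypothetical element
el.innerText = 'backend: webgl<br>performance: 25ms';    // displays the characters "<br>" verbatim
el.innerHTML = 'backend: webgl<br>performance: 25ms';    // renders an actual line break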

demo/draw.js

@@ -4,7 +4,7 @@ async function drawGesture(result, canvas, ui) {
   ctx.font = ui.baseFont;
   ctx.fillStyle = ui.baseLabel;
   let i = 1;
-  for (const gesture in result) {
+  for (let gesture = 0; gesture < result.length; gesture++) {
     const [where, what] = Object.entries(result[gesture]);
     if ((what.length > 1) && (what[1].length > 0)) {
       const person = where[1] > 0 ? `#${where[1]}` : '';
@@ -115,14 +115,14 @@ async function drawBody(result, canvas, ui) {
   if (!result) return;
   const ctx = canvas.getContext('2d');
   ctx.lineJoin = 'round';
-  for (const i in result) {
+  for (let i = 0; i < result.length; i++) {
     if (!lastDrawnPose[i] && ui.buffered) lastDrawnPose[i] = { ...result[i] };
     ctx.fillStyle = ui.baseColor;
     ctx.strokeStyle = ui.baseColor;
     ctx.font = ui.baseFont;
     ctx.lineWidth = ui.baseLineWidth;
     if (ui.drawPoints) {
-      for (const pt in result[i].keypoints) {
+      for (let pt = 0; pt < result[i].keypoints.length; pt++) {
         ctx.beginPath();
         if (ui.buffered) {
           lastDrawnPose[i].keypoints[pt].position.x = (lastDrawnPose[i].keypoints[pt].position.x + result[i].keypoints[pt].position.x) / 2;
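Note: throughout this commit, for...in loops over arrays are replaced with classic indexed loops. for...in iterates enumerable string keys, including anything a library adds to Array.prototype, while an indexed loop visits only the numeric positions. A minimal sketch of the difference (values are illustrative only):

const results = ['face', 'hand'];
Array.prototype.extra = true;                             // e.g. a polyfill extending the prototype
for (const i in results) console.log(i);                  // "0", "1", "extra" -- string keys, plus inherited props
for (let i = 0; i < results.length; i++) console.log(i);  // 0, 1 -- numeric indices only
delete Array.prototype.extra;                             // clean up after the illustration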

demo/menu.js

@@ -300,7 +300,7 @@ class Menu {
     const width = canvas.width / values.length;
     const max = 1 + Math.max(...values);
     const height = canvas.height / max;
-    for (const i in values) {
+    for (let i = 0; i < values.length; i++) {
       const gradient = ctx.createLinearGradient(0, (max - values[i]) * height, 0, 0);
       gradient.addColorStop(0.1, theme.chartColor);
       gradient.addColorStop(0.4, theme.background);

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/demo-browser-index.json

@@ -1,7 +1,7 @@
 {
   "inputs": {
     "demo/browser.js": {
-      "bytes": 25178,
+      "bytes": 25194,
       "imports": [
         {
           "path": "dist/human.esm.js"
@@ -18,7 +18,7 @@
       ]
     },
     "demo/draw.js": {
-      "bytes": 10568,
+      "bytes": 10630,
       "imports": []
     },
     "demo/gl-bench.js": {
@@ -26,11 +26,11 @@
       "imports": []
     },
     "demo/menu.js": {
-      "bytes": 13842,
+      "bytes": 13858,
       "imports": []
     },
     "dist/human.esm.js": {
-      "bytes": 1774850,
+      "bytes": 1774844,
       "imports": []
     }
   },
@@ -38,29 +38,29 @@
   "dist/demo-browser-index.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 2678894
+    "bytes": 2679258
   },
   "dist/demo-browser-index.js": {
     "imports": [],
     "exports": [],
     "inputs": {
       "dist/human.esm.js": {
-        "bytesInOutput": 1767609
+        "bytesInOutput": 1767603
       },
       "demo/draw.js": {
-        "bytesInOutput": 7724
+        "bytesInOutput": 7776
       },
       "demo/menu.js": {
-        "bytesInOutput": 11788
+        "bytesInOutput": 11800
       },
       "demo/gl-bench.js": {
         "bytesInOutput": 7382
       },
       "demo/browser.js": {
-        "bytesInOutput": 19345
+        "bytesInOutput": 19361
       }
     },
-    "bytes": 1821237
+    "bytes": 1821311
   }
 }
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.js vendored (2 changed lines)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.json vendored (28 changed lines)

@@ -159,7 +159,7 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 6993,
+      "bytes": 7009,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -179,7 +179,7 @@
       "imports": []
     },
     "src/face/facemesh.js": {
-      "bytes": 2720,
+      "bytes": 2740,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -196,7 +196,7 @@
       ]
     },
     "src/face/facepipeline.js": {
-      "bytes": 13868,
+      "bytes": 13884,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -228,7 +228,7 @@
       ]
     },
     "src/gesture/gesture.js": {
-      "bytes": 3260,
+      "bytes": 3308,
       "imports": []
     },
     "src/hand/anchors.js": {
@@ -244,7 +244,7 @@
       ]
     },
     "src/hand/handdetector.js": {
-      "bytes": 4276,
+      "bytes": 4252,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -255,7 +255,7 @@
       ]
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7608,
+      "bytes": 7615,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -357,7 +357,7 @@
   "dist/human.esm.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 2583292
+    "bytes": 2583490
   },
   "dist/human.esm.js": {
     "imports": [],
@@ -366,7 +366,7 @@
     ],
     "inputs": {
       "src/face/blazeface.js": {
-        "bytesInOutput": 5109
+        "bytesInOutput": 5121
       },
       "src/face/box.js": {
         "bytesInOutput": 1578
@@ -378,10 +378,10 @@
         "bytesInOutput": 30817
       },
       "src/face/facepipeline.js": {
-        "bytesInOutput": 9391
+        "bytesInOutput": 9403
       },
       "src/face/facemesh.js": {
-        "bytesInOutput": 2108
+        "bytesInOutput": 2124
       },
       "src/profile.js": {
         "bytesInOutput": 854
@@ -432,10 +432,10 @@
         "bytesInOutput": 832
       },
       "src/hand/handdetector.js": {
-        "bytesInOutput": 2765
+        "bytesInOutput": 2741
      },
       "src/hand/handpipeline.js": {
-        "bytesInOutput": 4691
+        "bytesInOutput": 4633
       },
       "src/hand/anchors.js": {
         "bytesInOutput": 127032
@@ -444,7 +444,7 @@
         "bytesInOutput": 1914
       },
       "src/gesture/gesture.js": {
-        "bytesInOutput": 2427
+        "bytesInOutput": 2463
       },
       "src/imagefx.js": {
         "bytesInOutput": 13576
@@ -471,7 +471,7 @@
         "bytesInOutput": 21
       }
     },
-    "bytes": 1774850
+    "bytes": 1774844
   }
 }
}

dist/human.js vendored (2 changed lines)

File diff suppressed because one or more lines are too long

dist/human.js.map vendored (4 changed lines)

File diff suppressed because one or more lines are too long

dist/human.json vendored (28 changed lines)

@@ -159,7 +159,7 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 6993,
+      "bytes": 7009,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -179,7 +179,7 @@
       "imports": []
     },
     "src/face/facemesh.js": {
-      "bytes": 2720,
+      "bytes": 2740,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -196,7 +196,7 @@
       ]
     },
     "src/face/facepipeline.js": {
-      "bytes": 13868,
+      "bytes": 13884,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -228,7 +228,7 @@
       ]
     },
     "src/gesture/gesture.js": {
-      "bytes": 3260,
+      "bytes": 3308,
       "imports": []
     },
     "src/hand/anchors.js": {
@@ -244,7 +244,7 @@
       ]
     },
     "src/hand/handdetector.js": {
-      "bytes": 4276,
+      "bytes": 4252,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -255,7 +255,7 @@
       ]
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7608,
+      "bytes": 7615,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -357,14 +357,14 @@
   "dist/human.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 2599797
+    "bytes": 2599995
   },
   "dist/human.js": {
     "imports": [],
     "exports": [],
     "inputs": {
       "src/face/blazeface.js": {
-        "bytesInOutput": 5109
+        "bytesInOutput": 5121
       },
       "src/face/box.js": {
         "bytesInOutput": 1578
@@ -376,10 +376,10 @@
         "bytesInOutput": 30817
       },
       "src/face/facepipeline.js": {
-        "bytesInOutput": 9391
+        "bytesInOutput": 9403
       },
       "src/face/facemesh.js": {
-        "bytesInOutput": 2108
+        "bytesInOutput": 2124
       },
       "src/profile.js": {
         "bytesInOutput": 854
@@ -430,10 +430,10 @@
         "bytesInOutput": 832
       },
       "src/hand/handdetector.js": {
-        "bytesInOutput": 2765
+        "bytesInOutput": 2741
       },
       "src/hand/handpipeline.js": {
-        "bytesInOutput": 4691
+        "bytesInOutput": 4633
       },
       "src/hand/anchors.js": {
         "bytesInOutput": 127032
@@ -442,7 +442,7 @@
         "bytesInOutput": 1914
       },
       "src/gesture/gesture.js": {
-        "bytesInOutput": 2427
+        "bytesInOutput": 2463
       },
       "src/imagefx.js": {
         "bytesInOutput": 13576
@@ -469,7 +469,7 @@
         "bytesInOutput": 21
       }
     },
-    "bytes": 1774928
+    "bytes": 1774922
   }
 }
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.node.js vendored (2 changed lines)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.node.json vendored (28 changed lines)

@@ -159,7 +159,7 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 6993,
+      "bytes": 7009,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -179,7 +179,7 @@
       "imports": []
     },
     "src/face/facemesh.js": {
-      "bytes": 2720,
+      "bytes": 2740,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -196,7 +196,7 @@
       ]
     },
     "src/face/facepipeline.js": {
-      "bytes": 13868,
+      "bytes": 13884,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -228,7 +228,7 @@
       ]
     },
     "src/gesture/gesture.js": {
-      "bytes": 3260,
+      "bytes": 3308,
       "imports": []
     },
     "src/hand/anchors.js": {
@@ -244,7 +244,7 @@
       ]
     },
     "src/hand/handdetector.js": {
-      "bytes": 4276,
+      "bytes": 4252,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -255,7 +255,7 @@
       ]
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7608,
+      "bytes": 7615,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -357,7 +357,7 @@
   "dist/human.node-gpu.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 686404
+    "bytes": 686602
   },
   "dist/human.node-gpu.js": {
     "imports": [],
@@ -367,7 +367,7 @@
         "bytesInOutput": 966
       },
       "src/face/blazeface.js": {
-        "bytesInOutput": 5264
+        "bytesInOutput": 5276
       },
       "src/face/box.js": {
         "bytesInOutput": 1628
@@ -379,10 +379,10 @@
         "bytesInOutput": 30828
       },
       "src/face/facepipeline.js": {
-        "bytesInOutput": 9447
+        "bytesInOutput": 9459
       },
       "src/face/facemesh.js": {
-        "bytesInOutput": 2153
+        "bytesInOutput": 2169
       },
       "src/profile.js": {
         "bytesInOutput": 854
@@ -433,10 +433,10 @@
         "bytesInOutput": 846
       },
       "src/hand/handdetector.js": {
-        "bytesInOutput": 2903
+        "bytesInOutput": 2879
       },
       "src/hand/handpipeline.js": {
-        "bytesInOutput": 4735
+        "bytesInOutput": 4677
       },
       "src/hand/anchors.js": {
         "bytesInOutput": 127034
@@ -445,7 +445,7 @@
         "bytesInOutput": 1958
       },
       "src/gesture/gesture.js": {
-        "bytesInOutput": 2431
+        "bytesInOutput": 2467
       },
       "src/imagefx.js": {
         "bytesInOutput": 13558
@@ -469,7 +469,7 @@
         "bytesInOutput": 20
       }
     },
-    "bytes": 249843
+    "bytes": 249837
   }
 }
}


@@ -20,9 +20,7 @@ class ModelWeights {
   }
   dispose() {
-    for (const varName in this.variables) {
-      this.variables[varName].dispose();
-    }
+    for (let i = 0; i < this.variables.length; i++) this.variables[i].dispose();
   }
 }
 exports.ModelWeights = ModelWeights;

src/face/blazeface.js

@@ -112,7 +112,7 @@ class BlazeFaceModel {
     const scoresVal = scores.dataSync();
     const annotatedBoxes = [];
-    for (const i in boundingBoxes) {
+    for (let i = 0; i < boundingBoxes.length; i++) {
       const boxIndex = boxIndices[i];
       const confidence = scoresVal[boxIndex];
       if (confidence > this.config.face.detector.minConfidence) {

src/face/facemesh.js

@@ -18,7 +18,7 @@ class MediaPipeFaceMesh {
       const mesh = prediction.coords ? prediction.coords.arraySync() : null;
       const annotations = {};
       if (mesh && mesh.length > 0) {
-        for (const key in coords.MESH_ANNOTATIONS) {
+        for (let key = 0; key < coords.MESH_ANNOTATIONS.length; key++) {
           if (config.face.iris.enabled || key.includes('Iris') === false) {
             annotations[key] = coords.MESH_ANNOTATIONS[key].map((index) => mesh[index]);
           }

src/face/facepipeline.js

@@ -156,7 +156,7 @@ class Pipeline {
       this.detectedFaces = 0;
       return null;
     }
-    for (const i in this.storedBoxes) {
+    for (let i = 0; i < this.storedBoxes.length; i++) {
       const scaledBox = bounding.scaleBoxCoordinates({ startPoint: this.storedBoxes[i].startPoint, endPoint: this.storedBoxes[i].endPoint }, detector.scaleFactor);
       const enlargedBox = bounding.enlargeBox(scaledBox);
       const landmarks = this.storedBoxes[i].landmarks.arraySync();

src/gesture/gesture.js

@@ -1,7 +1,7 @@
 exports.body = (res) => {
   if (!res) return [];
   const gestures = [];
-  for (const i in res) {
+  for (let i = 0; i < res.length; i++) {
     // raising hands
     const leftWrist = res[i].keypoints.find((a) => (a.part === 'leftWrist'));
     const rightWrist = res[i].keypoints.find((a) => (a.part === 'rightWrist'));
@@ -21,7 +21,7 @@ exports.body = (res) => {
 exports.face = (res) => {
   if (!res) return [];
   const gestures = [];
-  for (const i in res) {
+  for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 0) {
       const eyeFacing = res[i].mesh[35][2] - res[i].mesh[263][2];
       if (Math.abs(eyeFacing) < 10) gestures.push({ face: i, gesture: 'facing camera' });
@@ -42,7 +42,7 @@ exports.face = (res) => {
 exports.hand = (res) => {
   if (!res) return [];
   const gestures = [];
-  for (const i in res) {
+  for (let i = 0; i < res.length; i++) {
     const fingers = [];
     for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
       if (finger !== 'palmBase') fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger

src/hand/handdetector.js

@@ -50,24 +50,24 @@ class HandDetector {
     const batched = this.model.predict(input);
     const predictions = batched.squeeze();
     batched.dispose();
-    const scores = tf.tidy(() => tf.sigmoid(tf.slice(predictions, [0, 0], [-1, 1])).squeeze());
-    const scoresVal = scores.dataSync();
+    const scoresT = tf.tidy(() => tf.sigmoid(tf.slice(predictions, [0, 0], [-1, 1])).squeeze());
+    const scores = scoresT.dataSync();
     const rawBoxes = tf.slice(predictions, [0, 1], [-1, 4]);
     const boxes = this.normalizeBoxes(rawBoxes);
     rawBoxes.dispose();
     const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.hand.maxHands, config.hand.iouThreshold, config.hand.scoreThreshold);
     const filtered = filteredT.arraySync();
-    scores.dispose();
+    scoresT.dispose();
     filteredT.dispose();
     const hands = [];
-    for (const boxIndex of filtered) {
-      if (scoresVal[boxIndex] >= config.hand.minConfidence) {
-        const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);
-        const rawPalmLandmarks = tf.slice(predictions, [boxIndex, 5], [1, 14]);
-        const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));
+    for (const index of filtered) {
+      if (scores[index] >= config.hand.minConfidence) {
+        const matchingBox = tf.slice(boxes, [index, 0], [1, -1]);
+        const rawPalmLandmarks = tf.slice(predictions, [index, 5], [1, 14]);
+        const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, index).reshape([-1, 2]));
         rawPalmLandmarks.dispose();
-        hands.push({ box: matchingBox, palmLandmarks, confidence: scoresVal[boxIndex] });
+        hands.push({ box: matchingBox, palmLandmarks, confidence: scores[index] });
       }
     }
     predictions.dispose();
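Note: the renames here (and the confidenceT change in handpipeline.js below) keep the tensor handle and its downloaded values under distinct names, so the tensor can be disposed while the plain typed array stays usable. A minimal sketch of the pattern with tfjs (the values are illustrative only):

const tf = require('@tensorflow/tfjs');       // or the bundled dist/tfjs.esm.js
const scoresT = tf.tensor1d([0.9, 0.2, 0.7]); // tensor held by the backend
const scores = scoresT.dataSync();            // plain Float32Array copy on the CPU
scoresT.dispose();                            // frees the backend memory
console.log(scores[0]);                       // values remain accessible after disposal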

src/hand/handpipeline.js

@@ -28,9 +28,9 @@ const PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;
 const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;
 class HandPipeline {
-  constructor(boundingBoxDetector, meshDetector, inputSize) {
-    this.boxDetector = boundingBoxDetector;
-    this.meshDetector = meshDetector;
+  constructor(handDetector, landmarkDetector, inputSize) {
+    this.handDetector = handDetector;
+    this.landmarkDetector = landmarkDetector;
     this.inputSize = inputSize;
     this.storedBoxes = [];
     this.skipped = 1000;
@@ -90,23 +90,23 @@ class HandPipeline {
     // run new detector every skipFrames unless we only want box to start with
     let boxes;
     if ((this.skipped > config.hand.skipFrames) || !config.hand.landmarks || !config.videoOptimized) {
-      boxes = await this.boxDetector.estimateHandBounds(image, config);
+      boxes = await this.handDetector.estimateHandBounds(image, config);
       // don't reset on test image
       if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;
     }
     // if detector result count doesn't match current working set, use it to reset current working set
     if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.hand.maxHands) || !config.hand.landmarks)) {
-      this.storedBoxes = [];
       this.detectedHands = 0;
-      for (const possible of boxes) this.storedBoxes.push(possible);
+      this.storedBoxes = [...boxes];
+      // for (const possible of boxes) this.storedBoxes.push(possible);
       if (this.storedBoxes.length > 0) useFreshBox = true;
     }
     const hands = [];
     // console.log(`skipped: ${this.skipped} max: ${config.hand.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);
     // go through working set of boxes
-    for (const i in this.storedBoxes) {
+    for (let i = 0; i < this.storedBoxes.length; i++) {
       const currentBox = this.storedBoxes[i];
       if (!currentBox) continue;
       if (config.hand.landmarks) {
@@ -120,11 +120,11 @@ class HandPipeline {
       const handImage = croppedInput.div(255);
       croppedInput.dispose();
       rotatedImage.dispose();
-      const [confidence, keypoints] = await this.meshDetector.predict(handImage);
+      const [confidenceT, keypoints] = await this.landmarkDetector.predict(handImage);
       handImage.dispose();
-      const confidenceValue = confidence.dataSync()[0];
-      confidence.dispose();
-      if (confidenceValue >= config.hand.minConfidence) {
+      const confidence = confidenceT.dataSync()[0];
+      confidenceT.dispose();
+      if (confidence >= config.hand.minConfidence) {
         const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
         const rawCoords = keypointsReshaped.arraySync();
         keypoints.dispose();
@@ -134,7 +134,7 @@ class HandPipeline {
       this.storedBoxes[i] = nextBoundingBox;
       const result = {
         landmarks: coords,
-        confidence: confidenceValue,
+        confidence,
         box: {
           topLeft: nextBoundingBox.startPoint,
           bottomRight: nextBoundingBox.endPoint,
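Note: replacing the clear-and-push loop with this.storedBoxes = [...boxes] builds the same shallow copy of the detector output in a single assignment. A minimal sketch (the box shapes are illustrative only):

let storedBoxes = [{ startPoint: [0, 0] }];
const boxes = [{ startPoint: [10, 20] }, { startPoint: [30, 40] }];

// before: clear the working set, then push each detection
storedBoxes = [];
for (const possible of boxes) storedBoxes.push(possible);

// after: spread syntax builds the same shallow copy in one step
storedBoxes = [...boxes];
console.log(storedBoxes.length); // 2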

wiki (submodule, 2 changed lines)

@@ -1 +1 @@
-Subproject commit bcac4981f7df29e367259caf6b3b73e5ecde6519
+Subproject commit 9595a995f7bcf2c6b0d70fed98260c8ab4a6f0d7