major work on handpose model

pull/50/head
Vladimir Mandic 2020-11-04 14:59:30 -05:00
parent c47f9d68b3
commit c28d77534a
24 changed files with 566 additions and 543 deletions


@@ -332,7 +332,7 @@ function setupMenu() {
   menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
   menu.addLabel('Model Parameters');
-  menu.addRange('Max Objects', config.face.detector, 'maxFaces', 0, 50, 1, (val) => {
+  menu.addRange('Max Objects', config.face.detector, 'maxFaces', 1, 50, 1, (val) => {
     config.face.detector.maxFaces = parseInt(val);
     config.body.maxDetections = parseInt(val);
     config.hand.maxHands = parseInt(val);


@@ -154,6 +154,7 @@ async function drawHand(result, canvas, ui) {
       ctx.stroke();
     }
     if (ui.drawPoints) {
+      if (hand.landmarks && hand.landmarks.length > 0) {
       for (const point of hand.landmarks) {
         ctx.fillStyle = ui.useDepth ? `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)` : ui.baseColor;
         ctx.beginPath();
@@ -161,8 +162,10 @@ async function drawHand(result, canvas, ui) {
         ctx.fill();
       }
     }
+    }
     if (ui.drawPolygons) {
       const addPart = (part) => {
+        if (!part) return;
         for (let i = 0; i < part.length; i++) {
           ctx.lineWidth = ui.baseLineWidth;
           ctx.beginPath();
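
Reviewer note: the two new guards make drawing tolerant of hand results that arrive without landmark data (see the box-only result sketched in handpipeline.js later in this commit). A minimal sketch of the consumer-side pattern, with illustrative values only:

// Sketch, not part of the commit: check landmarks before iterating.
for (const hand of result) {
  if (!hand.landmarks || hand.landmarks.length === 0) continue; // box-only result, nothing to plot
  for (const point of hand.landmarks) {
    // point is [x, y, z]; e.g. z = -20 gives rgba(87.5, 167.5, 255, 0.5) above,
    // so points closer to the camera (more negative z) render greener
  }
}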

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long


@@ -15,7 +15,7 @@
       ]
     },
     "demo/draw.js": {
-      "bytes": 7389,
+      "bytes": 7493,
       "imports": []
     },
     "demo/menu.js": {
@@ -23,7 +23,7 @@
       "imports": []
     },
     "dist/human.esm.js": {
-      "bytes": 1274635,
+      "bytes": 1274638,
       "imports": []
     }
   },
@@ -31,19 +31,19 @@
   "dist/demo-browser-index.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 5518726
+    "bytes": 5519617
   },
   "dist/demo-browser-index.js": {
     "imports": [],
     "inputs": {
       "dist/human.esm.js": {
-        "bytesInOutput": 1660491
+        "bytesInOutput": 1660540
       },
       "dist/human.esm.js": {
         "bytesInOutput": 8716
       },
       "demo/draw.js": {
-        "bytesInOutput": 7275
+        "bytesInOutput": 7389
       },
       "demo/menu.js": {
         "bytesInOutput": 12356
@@ -52,7 +52,7 @@
         "bytesInOutput": 15336
       }
     },
-    "bytes": 1704296
+    "bytes": 1704459
   }
 }
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -75,7 +75,7 @@
     "imports": []
   },
   "src/gesture.js": {
-    "bytes": 2122,
+    "bytes": 2175,
     "imports": []
   },
   "src/handpose/anchors.js": {
@@ -87,7 +87,7 @@
     "imports": []
   },
   "src/handpose/handdetector.js": {
-    "bytes": 4484,
+    "bytes": 4313,
     "imports": [
       {
         "path": "src/handpose/box.js"
@@ -95,7 +95,7 @@
     ]
   },
   "src/handpose/handpipeline.js": {
-    "bytes": 7959,
+    "bytes": 8657,
     "imports": [
       {
         "path": "src/handpose/box.js"
@@ -106,7 +106,7 @@
     ]
   },
   "src/handpose/handpose.js": {
-    "bytes": 2667,
+    "bytes": 2766,
     "imports": [
       {
         "path": "src/handpose/handdetector.js"
@@ -124,7 +124,7 @@
     "imports": []
   },
   "src/human.js": {
-    "bytes": 11168,
+    "bytes": 11165,
     "imports": [
       {
         "path": "src/facemesh/facemesh.js"
@@ -290,7 +290,7 @@
   "dist/human.esm-nobundle.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 610884
+    "bytes": 611623
   },
   "dist/human.esm-nobundle.js": {
     "imports": [],
@@ -365,22 +365,22 @@
       "bytesInOutput": 1420
     },
     "src/handpose/handdetector.js": {
-      "bytesInOutput": 1891
+      "bytesInOutput": 1806
     },
     "src/handpose/util.js": {
       "bytesInOutput": 997
     },
     "src/handpose/handpipeline.js": {
-      "bytesInOutput": 3054
+      "bytesInOutput": 3069
     },
     "src/handpose/anchors.js": {
       "bytesInOutput": 127000
     },
     "src/handpose/handpose.js": {
-      "bytesInOutput": 1112
+      "bytesInOutput": 1127
     },
     "src/gesture.js": {
-      "bytesInOutput": 1200
+      "bytesInOutput": 1219
     },
     "src/imagefx.js": {
       "bytesInOutput": 11013
@@ -395,13 +395,13 @@
       "bytesInOutput": 2862
     },
     "src/human.js": {
-      "bytesInOutput": 5570
+      "bytesInOutput": 5611
     },
     "src/human.js": {
       "bytesInOutput": 0
     }
   },
-  "bytes": 213089
+  "bytes": 213094
 }
}
}

dist/human.esm.js (vendored, 66 changed lines)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.json (vendored, 24 changed lines)

@@ -236,7 +236,7 @@
     "imports": []
   },
   "src/gesture.js": {
-    "bytes": 2122,
+    "bytes": 2175,
     "imports": []
   },
   "src/handpose/anchors.js": {
@@ -252,7 +252,7 @@
     ]
   },
   "src/handpose/handdetector.js": {
-    "bytes": 4484,
+    "bytes": 4313,
     "imports": [
       {
         "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -263,7 +263,7 @@
     ]
   },
   "src/handpose/handpipeline.js": {
-    "bytes": 7959,
+    "bytes": 8657,
     "imports": [
       {
         "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -277,7 +277,7 @@
     ]
   },
   "src/handpose/handpose.js": {
-    "bytes": 2667,
+    "bytes": 2766,
     "imports": [
       {
         "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -298,7 +298,7 @@
     "imports": []
   },
   "src/human.js": {
-    "bytes": 11168,
+    "bytes": 11165,
     "imports": [
       {
         "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -499,7 +499,7 @@
   "dist/human.esm.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 5406665
+    "bytes": 5407404
   },
   "dist/human.esm.js": {
     "imports": [],
@@ -631,22 +631,22 @@
       "bytesInOutput": 1398
     },
     "src/handpose/handdetector.js": {
-      "bytesInOutput": 1900
+      "bytesInOutput": 1812
     },
     "src/handpose/util.js": {
       "bytesInOutput": 1005
     },
     "src/handpose/handpipeline.js": {
-      "bytesInOutput": 3052
+      "bytesInOutput": 3067
     },
     "src/handpose/anchors.js": {
       "bytesInOutput": 127001
     },
     "src/handpose/handpose.js": {
-      "bytesInOutput": 1090
+      "bytesInOutput": 1105
     },
     "src/gesture.js": {
-      "bytesInOutput": 1201
+      "bytesInOutput": 1220
     },
     "src/imagefx.js": {
       "bytesInOutput": 11014
@@ -661,13 +661,13 @@
       "bytesInOutput": 2863
     },
     "src/human.js": {
-      "bytesInOutput": 5583
+      "bytesInOutput": 5625
     },
     "src/human.js": {
       "bytesInOutput": 0
     }
   },
-  "bytes": 1274635
+  "bytes": 1274638
 }
}
}

dist/human.js (vendored, 66 changed lines)

File diff suppressed because one or more lines are too long

dist/human.js.map (vendored, 4 changed lines)

File diff suppressed because one or more lines are too long

dist/human.json (vendored, 24 changed lines)

@@ -236,7 +236,7 @@
     "imports": []
   },
   "src/gesture.js": {
-    "bytes": 2122,
+    "bytes": 2175,
     "imports": []
   },
   "src/handpose/anchors.js": {
@@ -252,7 +252,7 @@
     ]
   },
   "src/handpose/handdetector.js": {
-    "bytes": 4484,
+    "bytes": 4313,
     "imports": [
       {
         "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -263,7 +263,7 @@
     ]
   },
   "src/handpose/handpipeline.js": {
-    "bytes": 7959,
+    "bytes": 8657,
     "imports": [
       {
         "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -277,7 +277,7 @@
     ]
   },
   "src/handpose/handpose.js": {
-    "bytes": 2667,
+    "bytes": 2766,
     "imports": [
       {
         "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -298,7 +298,7 @@
     "imports": []
   },
   "src/human.js": {
-    "bytes": 11168,
+    "bytes": 11165,
     "imports": [
       {
         "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -499,7 +499,7 @@
   "dist/human.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 5406661
+    "bytes": 5407400
   },
   "dist/human.js": {
     "imports": [],
@@ -631,22 +631,22 @@
       "bytesInOutput": 1398
     },
     "src/handpose/handdetector.js": {
-      "bytesInOutput": 1900
+      "bytesInOutput": 1812
     },
     "src/handpose/util.js": {
       "bytesInOutput": 1005
     },
     "src/handpose/handpipeline.js": {
-      "bytesInOutput": 3052
+      "bytesInOutput": 3067
     },
     "src/handpose/anchors.js": {
       "bytesInOutput": 127001
     },
     "src/handpose/handpose.js": {
-      "bytesInOutput": 1090
+      "bytesInOutput": 1105
     },
     "src/gesture.js": {
-      "bytesInOutput": 1201
+      "bytesInOutput": 1220
     },
     "src/imagefx.js": {
       "bytesInOutput": 11014
@@ -661,10 +661,10 @@
       "bytesInOutput": 2862
     },
     "src/human.js": {
-      "bytesInOutput": 5621
+      "bytesInOutput": 5663
     }
   },
-  "bytes": 1274680
+  "bytes": 1274683
 }
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.node.js (vendored, 66 changed lines)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.node.json (vendored, 24 changed lines)

@@ -75,7 +75,7 @@
     "imports": []
   },
   "src/gesture.js": {
-    "bytes": 2122,
+    "bytes": 2175,
     "imports": []
   },
   "src/handpose/anchors.js": {
@@ -87,7 +87,7 @@
     "imports": []
   },
   "src/handpose/handdetector.js": {
-    "bytes": 4484,
+    "bytes": 4313,
     "imports": [
       {
         "path": "src/handpose/box.js"
@@ -95,7 +95,7 @@
     ]
   },
   "src/handpose/handpipeline.js": {
-    "bytes": 7959,
+    "bytes": 8657,
     "imports": [
       {
         "path": "src/handpose/box.js"
@@ -106,7 +106,7 @@
     ]
   },
   "src/handpose/handpose.js": {
-    "bytes": 2667,
+    "bytes": 2766,
     "imports": [
       {
         "path": "src/handpose/handdetector.js"
@@ -124,7 +124,7 @@
     "imports": []
   },
   "src/human.js": {
-    "bytes": 11168,
+    "bytes": 11165,
     "imports": [
       {
         "path": "src/facemesh/facemesh.js"
@@ -290,7 +290,7 @@
   "dist/human.node-nobundle.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 622399
+    "bytes": 623135
   },
   "dist/human.node-nobundle.js": {
     "imports": [],
@@ -365,22 +365,22 @@
       "bytesInOutput": 1419
     },
     "src/handpose/handdetector.js": {
-      "bytesInOutput": 1891
+      "bytesInOutput": 1806
     },
     "src/handpose/util.js": {
       "bytesInOutput": 996
     },
     "src/handpose/handpipeline.js": {
-      "bytesInOutput": 3054
+      "bytesInOutput": 3069
     },
     "src/handpose/anchors.js": {
       "bytesInOutput": 127000
     },
     "src/handpose/handpose.js": {
-      "bytesInOutput": 1112
+      "bytesInOutput": 1127
     },
     "src/gesture.js": {
-      "bytesInOutput": 1200
+      "bytesInOutput": 1219
     },
     "src/imagefx.js": {
       "bytesInOutput": 11013
@@ -398,10 +398,10 @@
       "bytesInOutput": 28
     },
     "src/human.js": {
-      "bytesInOutput": 5570
+      "bytesInOutput": 5611
     }
   },
-  "bytes": 213096
+  "bytes": 213101
 }
}
}


@@ -37,9 +37,11 @@ exports.hand = (res) => {
     for (const [finger, pos] of Object.entries(hand['annotations'])) {
       if (finger !== 'palmBase') fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
     }
+    if (fingers && fingers.length > 0) {
     const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));
     const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));
     gestures.push(`${closest.name} forward ${highest.name} up`);
     }
+  }
   return gestures;
 };
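
Reviewer note: the new length guard matters because Array.prototype.reduce without an initial value throws a TypeError on an empty array. A worked example with illustrative values (position is [x, y, z]; smaller z is closer to the camera, smaller y is higher in the frame):

const fingers = [
  { name: 'index', position: [100, 50, -20] },
  { name: 'thumb', position: [80, 120, -5] },
];
const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a)); // index (z = -20)
const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a)); // index (y = 50)
// resulting gesture string: 'index forward index up'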


@@ -21,8 +21,6 @@ const box = require('./box');
 class HandDetector {
   constructor(model, inputSize, anchorsAnnotated) {
     this.model = model;
-    this.width = inputSize;
-    this.height = inputSize;
     this.anchors = anchorsAnnotated.map((anchor) => [anchor.x_center, anchor.y_center]);
     this.anchorsTensor = tf.tensor2d(this.anchors);
     this.inputSizeTensor = tf.tensor1d([inputSize, inputSize]);
@@ -49,16 +47,14 @@ class HandDetector {
   }
   async getBoundingBoxes(input, config) {
-    const normalizedInput = tf.tidy(() => tf.mul(tf.sub(input, 0.5), 2));
-    const batchedPrediction = this.model.predict(normalizedInput);
+    const batchedPrediction = this.model.predict(input);
     const prediction = batchedPrediction.squeeze();
     const scores = tf.tidy(() => tf.sigmoid(tf.slice(prediction, [0, 0], [-1, 1])).squeeze());
     const rawBoxes = tf.slice(prediction, [0, 1], [-1, 4]);
     const boxes = this.normalizeBoxes(rawBoxes);
-    const boxesWithHandsTensor = tf.image.nonMaxSuppression(boxes, scores, config.maxHands, config.iouThreshold, config.scoreThreshold);
+    const boxesWithHandsTensor = tf.image.nonMaxSuppression(boxes, scores, config.maxHands, config.iouThreshold, 0.95); // config.scoreThreshold
     const boxesWithHands = boxesWithHandsTensor.arraySync();
     const toDispose = [
-      normalizedInput,
       batchedPrediction,
       boxesWithHandsTensor,
       prediction,
@@ -85,22 +81,19 @@ class HandDetector {
   async estimateHandBounds(input, config) {
     const inputHeight = input.shape[1];
     const inputWidth = input.shape[2];
-    const image = tf.tidy(() => input.resizeBilinear([this.width, this.height]).div(255));
+    const image = tf.tidy(() => input.resizeBilinear([config.inputSize, config.inputSize]).div(127.5).sub(1));
     const predictions = await this.getBoundingBoxes(image, config);
-    if (!predictions || predictions.length === 0) {
-      image.dispose();
-      return null;
-    }
+    image.dispose();
+    if (!predictions || predictions.length === 0) return null;
     const hands = [];
     for (const prediction of predictions) {
-      const boundingBoxes = prediction.boxes.arraySync();
-      const startPoint = boundingBoxes[0].slice(0, 2);
-      const endPoint = boundingBoxes[0].slice(2, 4);
+      const boundingBoxes = prediction.boxes.dataSync();
+      const startPoint = boundingBoxes.slice(0, 2);
+      const endPoint = boundingBoxes.slice(2, 4);
       const palmLandmarks = prediction.palmLandmarks.arraySync();
-      image.dispose();
       prediction.boxes.dispose();
       prediction.palmLandmarks.dispose();
-      hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks }, [inputWidth / this.width, inputHeight / this.height]));
+      hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks }, [inputWidth / config.inputSize, inputHeight / config.inputSize]));
     }
     return hands;
   }
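
Reviewer note: input scaling moved into estimateHandBounds, so getBoundingBoxes no longer builds an intermediate normalized tensor; note also that the NMS score threshold is now pinned to 0.95, with the config value left as a trailing comment. The old and new normalizations are numerically equivalent for a pixel p in [0, 255]; a quick sanity check (illustrative only, plain numbers rather than tensors):

const p = 200;
const oldWay = ((p / 255) - 0.5) * 2; // resize, div(255) to [0,1], then (x - 0.5) * 2
const newWay = p / 127.5 - 1;         // single pass: div(127.5).sub(1)
console.log(oldWay.toFixed(6), newWay.toFixed(6)); // 0.568627 0.568627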


@@ -35,8 +35,8 @@ class HandPipeline {
     this.inputSize = inputSize;
     this.regionsOfInterest = [];
     this.runsWithoutHandDetector = 0;
-    this.maxHandsNumber = 1;
     this.skipFrames = 0;
+    this.detectedHands = 0;
   }
   getBoxForPalmLandmarks(palmLandmarks, rotationMatrix) {
@@ -87,12 +87,18 @@ class HandPipeline {
   async estimateHands(image, config) {
     this.skipFrames = config.skipFrames;
-    const useFreshBox = this.shouldUpdateRegionsOfInterest();
+    // don't need box detection if we have sufficient number of boxes
+    let useFreshBox = (this.detectedHands === 0) || (this.detectedHands !== this.regionsOfInterest.length);
+    let boundingBoxPredictions;
+    // but every skipFrames check if detect boxes number changed
+    if (useFreshBox || this.runsWithoutHandDetector > this.skipFrames) boundingBoxPredictions = await this.boundingBoxDetector.estimateHandBounds(image, config);
+    // if there are new boxes and number of boxes doesn't match use new boxes, but not if maxhands is fixed to 1
+    if (config.maxHands > 1 && boundingBoxPredictions && boundingBoxPredictions.length > 0 && boundingBoxPredictions.length !== this.detectedHands) useFreshBox = true;
     if (useFreshBox) {
-      const boundingBoxPredictions = await this.boundingBoxDetector.estimateHandBounds(image, config);
       this.regionsOfInterest = [];
       if (!boundingBoxPredictions || boundingBoxPredictions.length === 0) {
         image.dispose();
+        this.detectedHands = 0;
         return null;
       }
       for (const boundingBoxPrediction of boundingBoxPredictions) {
@@ -121,11 +127,7 @@ class HandPipeline {
       handImage.dispose();
       const confidenceValue = confidence.dataSync()[0];
       confidence.dispose();
-      if (confidenceValue < config.minConfidence) {
-        keypoints.dispose();
-        this.regionsOfInterest[i] = null;
-        return null;
-      }
+      if (confidenceValue >= config.minConfidence) {
       const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
       const rawCoords = keypointsReshaped.arraySync();
       keypoints.dispose();
@@ -142,7 +144,21 @@ class HandPipeline {
         },
       };
       hands.push(result);
+      } else {
+        /*
+        const result = {
+          handInViewConfidence: confidenceValue,
+          boundingBox: {
+            topLeft: currentBox.startPoint,
+            bottomRight: currentBox.endPoint,
+          },
+        };
+        hands.push(result);
+        */
       }
+      keypoints.dispose();
+    }
+    this.detectedHands = hands.length;
     return hands;
   }
@@ -174,10 +190,6 @@ class HandPipeline {
     }
     this.regionsOfInterest[i] = iou > UPDATE_REGION_OF_INTEREST_IOU_THRESHOLD ? previousBox : newBox;
   }
-  shouldUpdateRegionsOfInterest() {
-    return !this.regionsOfInterest || (this.regionsOfInterest.length === 0) || (this.runsWithoutHandDetector >= this.skipFrames);
-  }
 }
 exports.HandPipeline = HandPipeline;
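
Reviewer note: the fixed maxHandsNumber field and the shouldUpdateRegionsOfInterest() helper are replaced by a detectedHands counter. The new detector-skipping rule, restated as a standalone sketch (hypothetical helper, not part of the commit):

// Re-run the palm-box detector only when the tracked hand count is inconsistent
// with the cached regions of interest, or after more than skipFrames frames
// without a detector pass; otherwise reuse cached boxes and run only the
// landmark model.
function needsDetectorPass(state, config) {
  const countMismatch = (state.detectedHands === 0) || (state.detectedHands !== state.regionsOfInterest.length);
  return countMismatch || (state.runsWithoutHandDetector > config.skipFrames);
}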


@@ -14,6 +14,8 @@
  * limitations under the License.
  * =============================================================================
  */
+// https://storage.googleapis.com/tfjs-models/demos/handpose/index.html
+
 const tf = require('@tensorflow/tfjs');
 const handdetector = require('./handdetector');
 const pipeline = require('./handpipeline');
@@ -43,12 +45,19 @@ class HandPose {
     const hands = [];
     for (const prediction of predictions) {
       const annotations = {};
+      if (prediction.landmarks) {
       for (const key of Object.keys(MESH_ANNOTATIONS)) {
         annotations[key] = MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);
       }
+      }
       hands.push({
         confidence: prediction.handInViewConfidence,
-        box: prediction.boundingBox ? [prediction.boundingBox.topLeft[0], prediction.boundingBox.topLeft[1], prediction.boundingBox.bottomRight[0] - prediction.boundingBox.topLeft[0], prediction.boundingBox.bottomRight[1] - prediction.boundingBox.topLeft[1]] : 0,
+        box: prediction.boundingBox ? [
+          prediction.boundingBox.topLeft[0],
+          prediction.boundingBox.topLeft[1],
+          prediction.boundingBox.bottomRight[0] - prediction.boundingBox.topLeft[0],
+          prediction.boundingBox.bottomRight[1] - prediction.boundingBox.topLeft[1],
+        ] : 0,
         landmarks: prediction.landmarks,
         annotations,
       });
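
Reviewer note: the reflowed box expression converts corner coordinates to [x, y, width, height]. With illustrative numbers:

const boundingBox = { topLeft: [10, 20], bottomRight: [110, 170] };
const box = [
  boundingBox.topLeft[0],                              // x = 10
  boundingBox.topLeft[1],                              // y = 20
  boundingBox.bottomRight[0] - boundingBox.topLeft[0], // width = 100
  boundingBox.bottomRight[1] - boundingBox.topLeft[1], // height = 150
]; // falls back to 0 when the prediction carries no boundingBox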


@@ -171,7 +171,7 @@ class Human {
     }
     // tf.ENV.set('WEBGL_CPU_FORWARD', true);
     // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
-    // tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
+    tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
     await tf.ready();
   }
 }
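
Reviewer note: WEBGL_PACK_DEPTHWISECONV enables packed depthwise-convolution kernels in the tfjs WebGL backend, which can speed up depthwise-separable models such as these. A minimal standalone sketch of the same setup (illustrative; flags must be set before the backend finishes initializing):

const tf = require('@tensorflow/tfjs');
async function initBackend() {
  tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true); // use packed textures for depthwise conv
  await tf.ready(); // resolves once the backend is initialized
}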