mirror of https://github.com/vladmandic/human
major work on handpose model
parent c47f9d68b3
commit c28d77534a
@@ -332,7 +332,7 @@ function setupMenu() {
   menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
   menu.addLabel('Model Parameters');
-  menu.addRange('Max Objects', config.face.detector, 'maxFaces', 0, 50, 1, (val) => {
+  menu.addRange('Max Objects', config.face.detector, 'maxFaces', 1, 50, 1, (val) => {
     config.face.detector.maxFaces = parseInt(val);
     config.body.maxDetections = parseInt(val);
     config.hand.maxHands = parseInt(val);
demo/draw.js (13 lines changed)
@@ -154,15 +154,18 @@ async function drawHand(result, canvas, ui) {
       ctx.stroke();
     }
     if (ui.drawPoints) {
-      for (const point of hand.landmarks) {
-        ctx.fillStyle = ui.useDepth ? `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)` : ui.baseColor;
-        ctx.beginPath();
-        ctx.arc(point[0], point[1], 2, 0, 2 * Math.PI);
-        ctx.fill();
+      if (hand.landmarks && hand.landmarks.length > 0) {
+        for (const point of hand.landmarks) {
+          ctx.fillStyle = ui.useDepth ? `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)` : ui.baseColor;
+          ctx.beginPath();
+          ctx.arc(point[0], point[1], 2, 0, 2 * Math.PI);
+          ctx.fill();
+        }
+      }
     }
     if (ui.drawPolygons) {
       const addPart = (part) => {
         if (!part) return;
         for (let i = 0; i < part.length; i++) {
           ctx.lineWidth = ui.baseLineWidth;
           ctx.beginPath();
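A note on the fillStyle expression kept inside the new guard: each landmark's third coordinate (depth) is shifted around the 127.5 midpoint, so depth differences show up as a red/green tint while blue stays fixed. A standalone sketch of that mapping (the ±2 scale factor comes from the diff; the sample depth values are invented):

// Map a landmark depth value (point[2]) to an RGBA color, as in drawHand.
// The sign convention of z depends on the model; this only shows the arithmetic.
function depthColor(z) {
  return `rgba(${127.5 + (2 * z)}, ${127.5 - (2 * z)}, 255, 0.5)`;
}

console.log(depthColor(0));   // rgba(127.5, 127.5, 255, 0.5) - neutral depth
console.log(depthColor(30));  // rgba(187.5, 67.5, 255, 0.5)  - red-shifted
console.log(depthColor(-30)); // rgba(67.5, 187.5, 255, 0.5)  - green-shifted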
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
@@ -15,7 +15,7 @@
       ]
     },
     "demo/draw.js": {
-      "bytes": 7389,
+      "bytes": 7493,
       "imports": []
     },
     "demo/menu.js": {
@@ -23,7 +23,7 @@
       "imports": []
     },
     "dist/human.esm.js": {
-      "bytes": 1274635,
+      "bytes": 1274638,
       "imports": []
     }
   },
@@ -31,19 +31,19 @@
     "dist/demo-browser-index.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 5518726
+      "bytes": 5519617
     },
     "dist/demo-browser-index.js": {
       "imports": [],
       "inputs": {
         "dist/human.esm.js": {
-          "bytesInOutput": 1660491
+          "bytesInOutput": 1660540
         },
         "dist/human.esm.js": {
           "bytesInOutput": 8716
         },
         "demo/draw.js": {
-          "bytesInOutput": 7275
+          "bytesInOutput": 7389
         },
         "demo/menu.js": {
           "bytesInOutput": 12356
@@ -52,7 +52,7 @@
           "bytesInOutput": 15336
         }
       },
-      "bytes": 1704296
+      "bytes": 1704459
     }
   }
 }
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -75,7 +75,7 @@
       "imports": []
     },
     "src/gesture.js": {
-      "bytes": 2122,
+      "bytes": 2175,
       "imports": []
     },
     "src/handpose/anchors.js": {
@@ -87,7 +87,7 @@
       "imports": []
     },
     "src/handpose/handdetector.js": {
-      "bytes": 4484,
+      "bytes": 4313,
       "imports": [
         {
           "path": "src/handpose/box.js"
@@ -95,7 +95,7 @@
       ]
     },
     "src/handpose/handpipeline.js": {
-      "bytes": 7959,
+      "bytes": 8657,
       "imports": [
         {
           "path": "src/handpose/box.js"
@@ -106,7 +106,7 @@
       ]
     },
     "src/handpose/handpose.js": {
-      "bytes": 2667,
+      "bytes": 2766,
       "imports": [
         {
           "path": "src/handpose/handdetector.js"
@@ -124,7 +124,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 11168,
+      "bytes": 11165,
       "imports": [
         {
           "path": "src/facemesh/facemesh.js"
@@ -290,7 +290,7 @@
     "dist/human.esm-nobundle.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 610884
+      "bytes": 611623
     },
     "dist/human.esm-nobundle.js": {
       "imports": [],
@@ -365,22 +365,22 @@
           "bytesInOutput": 1420
         },
         "src/handpose/handdetector.js": {
-          "bytesInOutput": 1891
+          "bytesInOutput": 1806
         },
         "src/handpose/util.js": {
           "bytesInOutput": 997
         },
         "src/handpose/handpipeline.js": {
-          "bytesInOutput": 3054
+          "bytesInOutput": 3069
         },
         "src/handpose/anchors.js": {
           "bytesInOutput": 127000
         },
         "src/handpose/handpose.js": {
-          "bytesInOutput": 1112
+          "bytesInOutput": 1127
         },
         "src/gesture.js": {
-          "bytesInOutput": 1200
+          "bytesInOutput": 1219
         },
         "src/imagefx.js": {
           "bytesInOutput": 11013
@@ -395,13 +395,13 @@
           "bytesInOutput": 2862
         },
         "src/human.js": {
-          "bytesInOutput": 5570
+          "bytesInOutput": 5611
         },
         "src/human.js": {
           "bytesInOutput": 0
         }
       },
-      "bytes": 213089
+      "bytes": 213094
     }
   }
 }
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -236,7 +236,7 @@
       "imports": []
     },
     "src/gesture.js": {
-      "bytes": 2122,
+      "bytes": 2175,
       "imports": []
     },
     "src/handpose/anchors.js": {
@@ -252,7 +252,7 @@
       ]
     },
     "src/handpose/handdetector.js": {
-      "bytes": 4484,
+      "bytes": 4313,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -263,7 +263,7 @@
       ]
     },
     "src/handpose/handpipeline.js": {
-      "bytes": 7959,
+      "bytes": 8657,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -277,7 +277,7 @@
       ]
     },
     "src/handpose/handpose.js": {
-      "bytes": 2667,
+      "bytes": 2766,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -298,7 +298,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 11168,
+      "bytes": 11165,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -499,7 +499,7 @@
     "dist/human.esm.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 5406665
+      "bytes": 5407404
     },
     "dist/human.esm.js": {
       "imports": [],
@@ -631,22 +631,22 @@
           "bytesInOutput": 1398
         },
         "src/handpose/handdetector.js": {
-          "bytesInOutput": 1900
+          "bytesInOutput": 1812
         },
         "src/handpose/util.js": {
           "bytesInOutput": 1005
         },
         "src/handpose/handpipeline.js": {
-          "bytesInOutput": 3052
+          "bytesInOutput": 3067
         },
         "src/handpose/anchors.js": {
           "bytesInOutput": 127001
         },
         "src/handpose/handpose.js": {
-          "bytesInOutput": 1090
+          "bytesInOutput": 1105
         },
         "src/gesture.js": {
-          "bytesInOutput": 1201
+          "bytesInOutput": 1220
         },
         "src/imagefx.js": {
           "bytesInOutput": 11014
@@ -661,13 +661,13 @@
           "bytesInOutput": 2863
         },
         "src/human.js": {
-          "bytesInOutput": 5583
+          "bytesInOutput": 5625
         },
         "src/human.js": {
           "bytesInOutput": 0
         }
       },
-      "bytes": 1274635
+      "bytes": 1274638
     }
   }
 }
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -236,7 +236,7 @@
       "imports": []
     },
     "src/gesture.js": {
-      "bytes": 2122,
+      "bytes": 2175,
       "imports": []
     },
     "src/handpose/anchors.js": {
@@ -252,7 +252,7 @@
       ]
     },
     "src/handpose/handdetector.js": {
-      "bytes": 4484,
+      "bytes": 4313,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -263,7 +263,7 @@
       ]
     },
     "src/handpose/handpipeline.js": {
-      "bytes": 7959,
+      "bytes": 8657,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -277,7 +277,7 @@
       ]
     },
     "src/handpose/handpose.js": {
-      "bytes": 2667,
+      "bytes": 2766,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -298,7 +298,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 11168,
+      "bytes": 11165,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -499,7 +499,7 @@
     "dist/human.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 5406661
+      "bytes": 5407400
     },
     "dist/human.js": {
       "imports": [],
@@ -631,22 +631,22 @@
           "bytesInOutput": 1398
         },
         "src/handpose/handdetector.js": {
-          "bytesInOutput": 1900
+          "bytesInOutput": 1812
         },
         "src/handpose/util.js": {
           "bytesInOutput": 1005
         },
         "src/handpose/handpipeline.js": {
-          "bytesInOutput": 3052
+          "bytesInOutput": 3067
         },
         "src/handpose/anchors.js": {
           "bytesInOutput": 127001
         },
         "src/handpose/handpose.js": {
-          "bytesInOutput": 1090
+          "bytesInOutput": 1105
         },
         "src/gesture.js": {
-          "bytesInOutput": 1201
+          "bytesInOutput": 1220
         },
         "src/imagefx.js": {
           "bytesInOutput": 11014
@@ -661,10 +661,10 @@
           "bytesInOutput": 2862
         },
         "src/human.js": {
-          "bytesInOutput": 5621
+          "bytesInOutput": 5663
         }
       },
-      "bytes": 1274680
+      "bytes": 1274683
     }
   }
 }
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -75,7 +75,7 @@
       "imports": []
     },
     "src/gesture.js": {
-      "bytes": 2122,
+      "bytes": 2175,
       "imports": []
     },
     "src/handpose/anchors.js": {
@@ -87,7 +87,7 @@
       "imports": []
     },
     "src/handpose/handdetector.js": {
-      "bytes": 4484,
+      "bytes": 4313,
       "imports": [
         {
           "path": "src/handpose/box.js"
@@ -95,7 +95,7 @@
       ]
     },
     "src/handpose/handpipeline.js": {
-      "bytes": 7959,
+      "bytes": 8657,
       "imports": [
         {
           "path": "src/handpose/box.js"
@@ -106,7 +106,7 @@
       ]
     },
     "src/handpose/handpose.js": {
-      "bytes": 2667,
+      "bytes": 2766,
       "imports": [
         {
           "path": "src/handpose/handdetector.js"
@@ -124,7 +124,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 11168,
+      "bytes": 11165,
       "imports": [
         {
           "path": "src/facemesh/facemesh.js"
@@ -290,7 +290,7 @@
     "dist/human.node-nobundle.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 622399
+      "bytes": 623135
     },
     "dist/human.node-nobundle.js": {
       "imports": [],
@@ -365,22 +365,22 @@
           "bytesInOutput": 1419
         },
         "src/handpose/handdetector.js": {
-          "bytesInOutput": 1891
+          "bytesInOutput": 1806
         },
         "src/handpose/util.js": {
           "bytesInOutput": 996
         },
         "src/handpose/handpipeline.js": {
-          "bytesInOutput": 3054
+          "bytesInOutput": 3069
         },
         "src/handpose/anchors.js": {
           "bytesInOutput": 127000
         },
         "src/handpose/handpose.js": {
-          "bytesInOutput": 1112
+          "bytesInOutput": 1127
         },
         "src/gesture.js": {
-          "bytesInOutput": 1200
+          "bytesInOutput": 1219
         },
         "src/imagefx.js": {
           "bytesInOutput": 11013
@@ -398,10 +398,10 @@
           "bytesInOutput": 28
         },
         "src/human.js": {
-          "bytesInOutput": 5570
+          "bytesInOutput": 5611
         }
       },
-      "bytes": 213096
+      "bytes": 213101
     }
   }
 }
@@ -37,9 +37,11 @@ exports.hand = (res) => {
     for (const [finger, pos] of Object.entries(hand['annotations'])) {
       if (finger !== 'palmBase') fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
     }
-    const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));
-    const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));
-    gestures.push(`${closest.name} forward ${highest.name} up`);
+    if (fingers && fingers.length > 0) {
+      const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));
+      const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));
+      gestures.push(`${closest.name} forward ${highest.name} up`);
+    }
   }
   return gestures;
 };
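A note on why the added guard matters: Array.prototype.reduce without an initial value throws on an empty array, so a frame with no usable fingertips would previously crash the gesture pass. A minimal sketch of the failure mode and the fix:

const fingers = []; // frame with no detected fingertips

// Unguarded: throws "TypeError: Reduce of empty array with no initial value".
try {
  fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));
} catch (e) {
  console.log(e.message);
}

// Guarded, as in this commit: the reduce calls are skipped entirely.
if (fingers && fingers.length > 0) {
  const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));
  console.log(`${closest.name} forward`);
}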
@@ -21,8 +21,6 @@ const box = require('./box');
 class HandDetector {
   constructor(model, inputSize, anchorsAnnotated) {
     this.model = model;
-    this.width = inputSize;
-    this.height = inputSize;
     this.anchors = anchorsAnnotated.map((anchor) => [anchor.x_center, anchor.y_center]);
     this.anchorsTensor = tf.tensor2d(this.anchors);
     this.inputSizeTensor = tf.tensor1d([inputSize, inputSize]);
@@ -49,16 +47,14 @@ class HandDetector {
   }

   async getBoundingBoxes(input, config) {
-    const normalizedInput = tf.tidy(() => tf.mul(tf.sub(input, 0.5), 2));
-    const batchedPrediction = this.model.predict(normalizedInput);
+    const batchedPrediction = this.model.predict(input);
     const prediction = batchedPrediction.squeeze();
     const scores = tf.tidy(() => tf.sigmoid(tf.slice(prediction, [0, 0], [-1, 1])).squeeze());
     const rawBoxes = tf.slice(prediction, [0, 1], [-1, 4]);
     const boxes = this.normalizeBoxes(rawBoxes);
-    const boxesWithHandsTensor = tf.image.nonMaxSuppression(boxes, scores, config.maxHands, config.iouThreshold, config.scoreThreshold);
+    const boxesWithHandsTensor = tf.image.nonMaxSuppression(boxes, scores, config.maxHands, config.iouThreshold, 0.95); // config.scoreThreshold
     const boxesWithHands = boxesWithHandsTensor.arraySync();
     const toDispose = [
-      normalizedInput,
       batchedPrediction,
       boxesWithHandsTensor,
       prediction,
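A note on the changed call: the score threshold passed to non-max suppression is pinned at 0.95 here rather than read from config.scoreThreshold (the trailing comment preserves the old source). A self-contained sketch of how tf.image.nonMaxSuppression filters candidate palm boxes; all box and score values below are invented for illustration:

const tf = require('@tensorflow/tfjs');

// Hypothetical candidate boxes as [y1, x1, y2, x2] plus confidence scores.
const boxes = tf.tensor2d([
  [0.10, 0.10, 0.40, 0.40], // strong detection
  [0.12, 0.11, 0.41, 0.42], // near-duplicate of the first box
  [0.60, 0.60, 0.90, 0.90], // weak detection
]);
const scores = tf.tensor1d([0.98, 0.97, 0.50]);

const maxHands = 2;
const iouThreshold = 0.3;    // overlap above this suppresses the lower-scored box
const scoreThreshold = 0.95; // hardcoded in this commit instead of config.scoreThreshold

const keep = tf.image.nonMaxSuppression(boxes, scores, maxHands, iouThreshold, scoreThreshold);
console.log(keep.arraySync()); // [0] - the duplicate and the weak box are dropped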
@@ -85,22 +81,19 @@ class HandDetector {
   async estimateHandBounds(input, config) {
     const inputHeight = input.shape[1];
     const inputWidth = input.shape[2];
-    const image = tf.tidy(() => input.resizeBilinear([this.width, this.height]).div(255));
+    const image = tf.tidy(() => input.resizeBilinear([config.inputSize, config.inputSize]).div(127.5).sub(1));
     const predictions = await this.getBoundingBoxes(image, config);
-    if (!predictions || predictions.length === 0) {
-      image.dispose();
-      return null;
-    }
+    image.dispose();
+    if (!predictions || predictions.length === 0) return null;
     const hands = [];
     for (const prediction of predictions) {
-      const boundingBoxes = prediction.boxes.arraySync();
-      const startPoint = boundingBoxes[0].slice(0, 2);
-      const endPoint = boundingBoxes[0].slice(2, 4);
+      const boundingBoxes = prediction.boxes.dataSync();
+      const startPoint = boundingBoxes.slice(0, 2);
+      const endPoint = boundingBoxes.slice(2, 4);
       const palmLandmarks = prediction.palmLandmarks.arraySync();
-      image.dispose();
       prediction.boxes.dispose();
       prediction.palmLandmarks.dispose();
-      hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks }, [inputWidth / this.width, inputHeight / this.height]));
+      hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks }, [inputWidth / config.inputSize, inputHeight / config.inputSize]));
     }
     return hands;
   }
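The input normalization moved here: the resized frame is now mapped straight from [0, 255] to [-1, 1] with div(127.5).sub(1), which is arithmetically the same as the old two-step div(255) followed by mul(sub(x, 0.5), 2) in getBoundingBoxes, but saves an intermediate tensor. A quick sketch of the equivalence:

const tf = require('@tensorflow/tfjs');

const pixels = tf.tensor1d([0, 127.5, 255]); // representative 8-bit pixel values

// Old pipeline: scale to [0, 1] in estimateHandBounds, then shift to [-1, 1] in getBoundingBoxes.
const oldNorm = tf.mul(tf.sub(pixels.div(255), 0.5), 2);

// New pipeline: a single step straight to [-1, 1].
const newNorm = pixels.div(127.5).sub(1);

console.log(oldNorm.arraySync()); // [-1, 0, 1]
console.log(newNorm.arraySync()); // [-1, 0, 1]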
@@ -35,8 +35,8 @@ class HandPipeline {
     this.inputSize = inputSize;
     this.regionsOfInterest = [];
     this.runsWithoutHandDetector = 0;
-    this.maxHandsNumber = 1;
+    this.skipFrames = 0;
+    this.detectedHands = 0;
   }

   getBoxForPalmLandmarks(palmLandmarks, rotationMatrix) {
@@ -87,12 +87,18 @@ class HandPipeline {

   async estimateHands(image, config) {
     this.skipFrames = config.skipFrames;
-    const useFreshBox = this.shouldUpdateRegionsOfInterest();
+    // don't need box detection if we have a sufficient number of boxes
+    let useFreshBox = (this.detectedHands === 0) || (this.detectedHands !== this.regionsOfInterest.length);
+    let boundingBoxPredictions;
+    // but every skipFrames, check if the number of detected boxes changed
+    if (useFreshBox || this.runsWithoutHandDetector > this.skipFrames) boundingBoxPredictions = await this.boundingBoxDetector.estimateHandBounds(image, config);
+    // if there are new boxes and the number of boxes doesn't match, use the new boxes, but not if maxHands is fixed to 1
+    if (config.maxHands > 1 && boundingBoxPredictions && boundingBoxPredictions.length > 0 && boundingBoxPredictions.length !== this.detectedHands) useFreshBox = true;
     if (useFreshBox) {
-      const boundingBoxPredictions = await this.boundingBoxDetector.estimateHandBounds(image, config);
       this.regionsOfInterest = [];
       if (!boundingBoxPredictions || boundingBoxPredictions.length === 0) {
         image.dispose();
+        this.detectedHands = 0;
         return null;
       }
       for (const boundingBoxPrediction of boundingBoxPredictions) {
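The rewritten logic caches bounding boxes between frames: the full palm detector only runs when the tracked-hand count looks stale or every skipFrames frames. A condensed, hedged sketch of the decision flow (names follow the diff; the surrounding class state is passed in explicitly here for illustration):

// Sketch of the frame-skipping decision in estimateHands, assuming
// state.detectedHands and state.regionsOfInterest carry over from the
// previous frame.
async function decideDetection(state, config, detector, image) {
  // Re-run detection when no hands are tracked, or tracked boxes disagree
  // with the last detection count.
  let useFreshBox = (state.detectedHands === 0)
    || (state.detectedHands !== state.regionsOfInterest.length);

  let boxes;
  // Even when tracking looks healthy, re-check every `skipFrames` frames
  // in case the number of hands in view changed.
  if (useFreshBox || state.runsWithoutHandDetector > config.skipFrames) {
    boxes = await detector.estimateHandBounds(image, config);
  }

  // Adopt the new boxes if the hand count changed, unless maxHands is 1
  // (a single tracked hand is cheaper to keep following).
  if (config.maxHands > 1 && boxes && boxes.length > 0 && boxes.length !== state.detectedHands) {
    useFreshBox = true;
  }
  return { useFreshBox, boxes };
}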
@@ -121,28 +127,38 @@ class HandPipeline {
       handImage.dispose();
       const confidenceValue = confidence.dataSync()[0];
       confidence.dispose();
-      if (confidenceValue < config.minConfidence) {
+      if (confidenceValue >= config.minConfidence) {
+        const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
+        const rawCoords = keypointsReshaped.arraySync();
         keypoints.dispose();
-        this.regionsOfInterest[i] = null;
-        return null;
-      }
+        keypointsReshaped.dispose();
+        const coords = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
+        const nextBoundingBox = this.getBoxForHandLandmarks(coords);
+        this.updateRegionsOfInterest(nextBoundingBox, i);
+        const result = {
+          landmarks: coords,
+          handInViewConfidence: confidenceValue,
+          boundingBox: {
+            topLeft: nextBoundingBox.startPoint,
+            bottomRight: nextBoundingBox.endPoint,
+          },
+        };
+        hands.push(result);
+      } else {
+        /*
+        const result = {
+          handInViewConfidence: confidenceValue,
+          boundingBox: {
+            topLeft: currentBox.startPoint,
+            bottomRight: currentBox.endPoint,
+          },
+        };
+        hands.push(result);
+        */
+      }
-      const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
-      const rawCoords = keypointsReshaped.arraySync();
-      keypoints.dispose();
-      keypointsReshaped.dispose();
-      const coords = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
-      const nextBoundingBox = this.getBoxForHandLandmarks(coords);
-      this.updateRegionsOfInterest(nextBoundingBox, i);
-      const result = {
-        landmarks: coords,
-        handInViewConfidence: confidenceValue,
-        boundingBox: {
-          topLeft: nextBoundingBox.startPoint,
-          bottomRight: nextBoundingBox.endPoint,
-        },
-      };
-      hands.push(result);
     }
+    this.detectedHands = hands.length;
     return hands;
   }

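For context on the tf.reshape(keypoints, [-1, 3]) step now living inside the confidence branch: the landmark model emits a flat run of x/y/z values (21 landmarks for this model), and [-1, 3] lets TensorFlow.js infer the row count. A tiny illustration with fabricated values:

const tf = require('@tensorflow/tfjs');

// Fabricated flat output: 21 landmarks * 3 coordinates = 63 values.
const flat = tf.linspace(0, 62, 63);

// [-1, 3] infers the row count, yielding a [21, 3] tensor of [x, y, z] rows.
const landmarks = tf.reshape(flat, [-1, 3]);
console.log(landmarks.shape);          // [21, 3]
console.log(landmarks.arraySync()[0]); // [0, 1, 2] - first landmark's x, y, z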
@@ -174,10 +190,6 @@
     }
     this.regionsOfInterest[i] = iou > UPDATE_REGION_OF_INTEREST_IOU_THRESHOLD ? previousBox : newBox;
   }
-
-  shouldUpdateRegionsOfInterest() {
-    return !this.regionsOfInterest || (this.regionsOfInterest.length === 0) || (this.runsWithoutHandDetector >= this.skipFrames);
-  }
 }

 exports.HandPipeline = HandPipeline;
@@ -14,6 +14,8 @@
  * limitations under the License.
  * =============================================================================
  */
+// https://storage.googleapis.com/tfjs-models/demos/handpose/index.html
+
 const tf = require('@tensorflow/tfjs');
 const handdetector = require('./handdetector');
 const pipeline = require('./handpipeline');
@@ -43,12 +45,19 @@ class HandPose {
     const hands = [];
     for (const prediction of predictions) {
       const annotations = {};
-      for (const key of Object.keys(MESH_ANNOTATIONS)) {
-        annotations[key] = MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);
+      if (prediction.landmarks) {
+        for (const key of Object.keys(MESH_ANNOTATIONS)) {
+          annotations[key] = MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);
+        }
       }
       hands.push({
         confidence: prediction.handInViewConfidence,
-        box: prediction.boundingBox ? [prediction.boundingBox.topLeft[0], prediction.boundingBox.topLeft[1], prediction.boundingBox.bottomRight[0] - prediction.boundingBox.topLeft[0], prediction.boundingBox.bottomRight[1] - prediction.boundingBox.topLeft[1]] : 0,
+        box: prediction.boundingBox ? [
+          prediction.boundingBox.topLeft[0],
+          prediction.boundingBox.topLeft[1],
+          prediction.boundingBox.bottomRight[0] - prediction.boundingBox.topLeft[0],
+          prediction.boundingBox.bottomRight[1] - prediction.boundingBox.topLeft[1],
+        ] : 0,
         landmarks: prediction.landmarks,
         annotations,
       });
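The reformatted box field converts the pipeline's corner-style bounding box into [x, y, width, height]. A standalone sketch of that conversion (the sample coordinates are invented):

// Hypothetical bounding box as produced by the pipeline: two corner points.
const boundingBox = { topLeft: [120, 80], bottomRight: [320, 360] };

// Same conversion as in the diff: [x, y, width, height].
const box = [
  boundingBox.topLeft[0],                              // x = 120
  boundingBox.topLeft[1],                              // y = 80
  boundingBox.bottomRight[0] - boundingBox.topLeft[0], // width = 200
  boundingBox.bottomRight[1] - boundingBox.topLeft[1], // height = 280
];
console.log(box); // [120, 80, 200, 280]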
@@ -171,7 +171,7 @@ class Human {
     }
     // tf.ENV.set('WEBGL_CPU_FORWARD', true);
     // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
-    // tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
+    tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
     await tf.ready();
   }
 }
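Enabling WEBGL_PACK_DEPTHWISECONV tells the tfjs WebGL backend to use packed textures for depthwise convolutions, which generally speeds up MobileNet-style models such as the hand detector. A minimal browser-context sketch of setting the flag before the backend finishes initializing (tf.env().set is the current tfjs spelling; tf.ENV is the older alias used in the diff):

const tf = require('@tensorflow/tfjs');

async function initBackend() {
  // Packed depthwise conv is faster for depthwise-separable models on WebGL.
  tf.env().set('WEBGL_PACK_DEPTHWISECONV', true);
  await tf.ready(); // finalize backend selection with the flag applied
  console.log(tf.getBackend());
}

initBackend();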