prototype handtracking

pull/356/head
Vladimir Mandic 2021-09-21 16:48:16 -04:00
parent a5977e3f45
commit 9186e46c57
13 changed files with 1487 additions and 27 deletions

View File

@ -11,10 +11,9 @@
### **HEAD -> main** 2021/09/20 mandic00@live.com
- support for dynamic backend switching
- initial automated browser tests
### **origin/main** 2021/09/20 mandic00@live.com
- enhanced automated test coverage
- more automated tests
- added configuration validation
- prevent validation failed on some model combinations

View File

@ -44,19 +44,19 @@ let userConfig = {
},
face: { enabled: false,
detector: { return: false, rotation: true },
mesh: { enabled: true },
iris: { enabled: true },
mesh: { enabled: false },
iris: { enabled: false },
description: { enabled: false },
emotion: { enabled: false },
},
object: { enabled: false },
gesture: { enabled: true },
hand: { enabled: true },
// hand: { enabled: true, landmarks: false, maxDetected: 3, minConfidence: 0.1 },
hand: { enabled: true, maxDetected: 3, minConfidence: 0.3, detector: { modelPath: 'handtrack.json' } },
body: { enabled: false },
// body: { enabled: true, modelPath: 'movenet-multipose.json' },
// body: { enabled: true, modelPath: 'posenet.json' },
segmentation: { enabled: false },
/*
*/
};

BIN
models/handtrack.bin Normal file

Binary file not shown.

1226
models/handtrack.json Normal file

File diff suppressed because one or more lines are too long

View File

@ -390,10 +390,10 @@ export async function hand(inCanvas: HTMLCanvasElement | OffscreenCanvas, result
if (localOptions.drawLabels) {
if (localOptions.shadowColor && localOptions.shadowColor !== '') {
ctx.fillStyle = localOptions.shadowColor;
ctx.fillText('hand', h.box[0] + 3, 1 + h.box[1] + localOptions.lineHeight, h.box[2]);
ctx.fillText(`${h.label}:${Math.trunc(100 * h.score)}%`, h.box[0] + 3, 1 + h.box[1] + localOptions.lineHeight, h.box[2]);
}
ctx.fillStyle = localOptions.labelColor;
ctx.fillText('hand', h.box[0] + 2, 0 + h.box[1] + localOptions.lineHeight, h.box[2]);
ctx.fillText(`${h.label}:${Math.trunc(100 * h.score)}%`, h.box[0] + 2, 0 + h.box[1] + localOptions.lineHeight, h.box[2]);
}
ctx.stroke();
}
@ -405,9 +405,9 @@ export async function hand(inCanvas: HTMLCanvasElement | OffscreenCanvas, result
}
}
}
if (localOptions.drawLabels) {
if (localOptions.drawLabels && h.annotations) {
const addHandLabel = (part, title) => {
if (!part) return;
if (!part || part.length === 0 || !part[0]) return;
ctx.fillStyle = localOptions.useDepth ? `rgba(${127.5 + (2 * part[part.length - 1][2])}, ${127.5 - (2 * part[part.length - 1][2])}, 255, 0.5)` : localOptions.color;
ctx.fillText(title, part[part.length - 1][0] + 4, part[part.length - 1][1] + 4);
};
@ -419,9 +419,9 @@ export async function hand(inCanvas: HTMLCanvasElement | OffscreenCanvas, result
addHandLabel(h.annotations['thumb'], 'thumb');
addHandLabel(h.annotations['palm'], 'palm');
}
if (localOptions.drawPolygons) {
if (localOptions.drawPolygons && h.annotations) {
const addHandLine = (part) => {
if (!part) return;
if (!part || part.length === 0 || !part[0]) return;
for (let i = 0; i < part.length; i++) {
ctx.beginPath();
ctx.strokeStyle = localOptions.useDepth ? `rgba(${127.5 + (2 * part[i][2])}, ${127.5 - (2 * part[i][2])}, 255, 0.5)` : localOptions.color;

View File

@ -123,8 +123,10 @@ export const hand = (res): GestureResult[] => {
const gestures: Array<{ hand: number, gesture: HandGesture }> = [];
for (let i = 0; i < res.length; i++) {
const fingers: Array<{ name: string, position: number }> = [];
for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
if (finger !== 'palmBase' && Array.isArray(pos) && pos[0]) fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
if (res[i]['annotations']) {
for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
if (finger !== 'palmBase' && Array.isArray(pos) && pos[0]) fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
}
}
if (fingers && fingers.length > 0) {
const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));
@ -132,8 +134,10 @@ export const hand = (res): GestureResult[] => {
const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));
gestures.push({ hand: i, gesture: `${highest.name} up` as HandGesture });
}
const poses = fingerPose.match(res[i]['keypoints']);
for (const pose of poses) gestures.push({ hand: i, gesture: pose.name as HandGesture });
if (res[i]['keypoints']) {
const poses = fingerPose.match(res[i]['keypoints']);
for (const pose of poses) gestures.push({ hand: i, gesture: pose.name as HandGesture });
}
}
return gestures;
};

View File

@ -99,7 +99,7 @@ export class HandPipeline {
// for (const possible of boxes) this.storedBoxes.push(possible);
if (this.storedBoxes.length > 0) useFreshBox = true;
}
const hands: Array<{ landmarks: number[], confidence: number, box: { topLeft: number[], bottomRight: number[] } }> = [];
const hands: Array<{ landmarks: number[], confidence: number, boxConfidence: number, fingerConfidence: number, box: { topLeft: number[], bottomRight: number[] } }> = [];
// go through working set of boxes
for (let i = 0; i < this.storedBoxes.length; i++) {
@ -131,6 +131,8 @@ export class HandPipeline {
const result = {
landmarks: coords,
confidence,
boxConfidence: currentBox.confidence,
fingerConfidence: confidence,
box: { topLeft: nextBoundingBox.startPoint, bottomRight: nextBoundingBox.endPoint },
};
hands.push(result);
@ -144,6 +146,8 @@ export class HandPipeline {
const enlarged = box.enlargeBox(box.squarifyBox(currentBox), handBoxEnlargeFactor);
const result = {
confidence: currentBox.confidence,
boxConfidence: currentBox.confidence,
fingerConfidence: 0,
box: { topLeft: enlarged.startPoint, bottomRight: enlarged.endPoint },
landmarks: [],
};

View File

@ -69,6 +69,9 @@ export async function predict(input: Tensor, config: Config): Promise<HandResult
hands.push({
id: i,
score: Math.round(100 * predictions[i].confidence) / 100,
boxScore: Math.round(100 * predictions[i].boxConfidence) / 100,
fingerScore: Math.round(100 * predictions[i].fingerConfidence) / 100,
label: 'hand',
box,
boxRaw,
keypoints,

207
src/handtrack/handtrack.ts Normal file
View File

@ -0,0 +1,207 @@
/**
* Hand Detection and Segmentation
*/
import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import type { HandResult } from '../result';
import type { GraphModel, Tensor } from '../tfjs/types';
import type { Config } from '../config';
import { env } from '../env';
import * as fingerPose from '../fingerpose/fingerpose';
// graph models for the two-stage pipeline: [0] = hand detector, [1] = hand skeleton/landmarks
const models: [GraphModel | null, GraphModel | null] = [null, null];
// detector output tensor names: per-class scores and decoded box coordinates
const modelOutputNodes = ['StatefulPartitionedCall/Postprocessor/Slice', 'StatefulPartitionedCall/Postprocessor/ExpandDims_1'];
// input resolution of each model, discovered from the model signature at load time
const inputSize = [0, 0];
// class labels emitted by the detector model (index matches the class-score column)
const classes = [
'hand',
'fist',
'pinch',
'point',
'face',
'tip',
'pinchtip',
];
// number of consecutive frames served from cached boxes
let skipped = 0;
// [width, height] of the most recent input tensor, set on each predict() call
let outputSize;
// intermediate detection result produced by the detector stage (stage 1)
type HandDetectResult = {
id: number,
score: number,
box: [number, number, number, number],
boxRaw: [number, number, number, number],
label: string,
yxBox: [number, number, number, number],
}
// cached detections, reused by predict() while frames are being skipped
let boxes: Array<HandDetectResult> = [];
// keypoint indices belonging to each finger; index 0 is the palm base
const fingerMap = {
thumb: [1, 2, 3, 4],
index: [5, 6, 7, 8],
middle: [9, 10, 11, 12],
ring: [13, 14, 15, 16],
pinky: [17, 18, 19, 20],
palm: [0],
};
/**
 * Loads the two handtrack models: [0] detector, [1] skeleton/landmarks.
 * Loaded models are cached in module state; env.initial forces a reload.
 * @param config - global configuration; model paths come from config.hand.detector / config.hand.skeleton
 * @returns both graph models as a tuple
 */
export async function load(config: Config): Promise<[GraphModel, GraphModel]> {
  if (env.initial) { // environment was reset — drop any cached models
    models[0] = null;
    models[1] = null;
  }
  if (!models[0]) {
    models[0] = await tf.loadGraphModel(join(config.modelBasePath, config.hand.detector?.modelPath || '')) as unknown as GraphModel;
    const inputs = Object.values(models[0].modelSignature['inputs']);
    // input width is taken from the model signature (dim[2] of the first input)
    inputSize[0] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0;
    // fix: report the hand detector path (original mistakenly logged config.object.modelPath)
    if (!models[0] || !models[0]['modelUrl']) log('load model failed:', config.hand.detector?.modelPath);
    else if (config.debug) log('load model:', models[0]['modelUrl']);
  } else if (config.debug) log('cached model:', models[0]['modelUrl']);
  if (!models[1]) {
    models[1] = await tf.loadGraphModel(join(config.modelBasePath, config.hand.skeleton?.modelPath || '')) as unknown as GraphModel;
    const inputs = Object.values(models[1].modelSignature['inputs']);
    inputSize[1] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0;
    // fix: report the hand skeleton path (original mistakenly logged config.object.modelPath)
    if (!models[1] || !models[1]['modelUrl']) log('load model failed:', config.hand.skeleton?.modelPath);
    else if (config.debug) log('load model:', models[1]['modelUrl']);
  } else if (config.debug) log('cached model:', models[1]['modelUrl']);
  return models as [GraphModel, GraphModel];
}
// Stage 1: run the detector model on the full frame and return NMS-filtered
// candidate boxes for the 'hand' (class 0) and 'fist' (class 1) classes,
// with coordinates in both normalized ([0..1]) and pixel space.
async function detectHands(input: Tensor, config: Config): Promise<HandDetectResult[]> {
const hands: HandDetectResult[] = [];
if (!input || !models[0]) return hands;
// all intermediate tensors are tracked in t and disposed together at the end
const t: Record<string, Tensor> = {};
t.resize = tf.image.resizeBilinear(input, [240, 320]); // todo: resize with padding
t.cast = tf.cast(t.resize, 'int32');
// detector exposes raw per-class scores and decoded boxes on named output nodes
[t.rawScores, t.rawBoxes] = await models[0].executeAsync(t.cast, modelOutputNodes) as Tensor[];
t.boxes = tf.squeeze(t.rawBoxes, [0, 2]);
t.scores = tf.squeeze(t.rawScores, [0]);
// split the score tensor into one 1-D tensor per class
const classScores = tf.unstack(t.scores, 1);
let id = 0;
for (let i = 0; i < classScores.length; i++) {
if (i !== 0 && i !== 1) continue; // only keep 'hand' (0) and 'fist' (1) detections
// non-max suppression per class, thresholded by minConfidence and iouThreshold
t.nms = await tf.image.nonMaxSuppressionAsync(t.boxes, classScores[i], config.hand.maxDetected, config.hand.iouThreshold, config.hand.minConfidence);
const nms = await t.nms.data();
tf.dispose(t.nms);
for (const res of Array.from(nms)) { // generates results for each class
const boxSlice = tf.slice(t.boxes, res, 1);
const yxBox = await boxSlice.data();
// convert normalized [y1, x1, y2, x2] into [x, y, width, height]
const boxRaw: [number, number, number, number] = [yxBox[1], yxBox[0], yxBox[3] - yxBox[1], yxBox[2] - yxBox[0]];
// scale normalized box into pixel coordinates of the original input
const box: [number, number, number, number] = [Math.trunc(boxRaw[0] * outputSize[0]), Math.trunc(boxRaw[1] * outputSize[1]), Math.trunc(boxRaw[2] * outputSize[0]), Math.trunc(boxRaw[3] * outputSize[1])];
tf.dispose(boxSlice);
const scoreSlice = tf.slice(classScores[i], res, 1);
const score = (await scoreSlice.data())[0];
tf.dispose(scoreSlice);
const hand: HandDetectResult = { id: id++, score, box, boxRaw, label: classes[i], yxBox };
hands.push(hand);
}
}
// release the unstacked per-class tensors and every intermediate held in t
classScores.forEach((tensor) => tf.dispose(tensor));
Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
return hands;
}
/*
const scaleFact = 1.2;
function updateBoxes(h, keypoints) {
const fingerX = keypoints.map((pt) => pt[0]);
const fingerY = keypoints.map((pt) => pt[1]);
const minX = Math.min(...fingerX);
const maxX = Math.max(...fingerX);
const minY = Math.min(...fingerY);
const maxY = Math.max(...fingerY);
h.box = [
Math.trunc(minX / scaleFact),
Math.trunc(minY / scaleFact),
Math.trunc(scaleFact * maxX - minX),
Math.trunc(scaleFact * maxY - minY),
] as [number, number, number, number];
h.boxRaw = [ // note: original read 'h.bowRaw' (typo) and divided the whole box array by a number
h.box[0] / outputSize[0],
h.box[1] / outputSize[1],
h.box[2] / outputSize[0],
h.box[3] / outputSize[1],
] as [number, number, number, number];
h.yxBox = [
h.boxRaw[1],
h.boxRaw[0],
h.boxRaw[3] + h.boxRaw[1],
h.boxRaw[2] + h.boxRaw[0],
] as [number, number, number, number];
return h;
}
*/
/**
 * Stage 2: runs the skeleton model on one detected hand box and maps the
 * resulting keypoints back into input-image coordinates, plus per-finger
 * annotations via fingerMap.
 * Returns a HandResult even when landmarks are disabled or the model is
 * missing — then only the detector box/score fields are populated and
 * fingerScore stays 0.
 * @param input - full input frame tensor
 * @param h - detector-stage result for one hand
 * @param config - global configuration (uses hand.landmarks, hand.minConfidence)
 */
async function detectFingers(input: Tensor, h: HandDetectResult, config: Config): Promise<HandResult> {
  const hand: HandResult = {
    id: h.id,
    score: Math.round(100 * h.score) / 100,
    boxScore: Math.round(100 * h.score) / 100,
    fingerScore: 0,
    box: h.box,
    boxRaw: h.boxRaw,
    label: h.label,
    keypoints: [],
    landmarks: {} as HandResult['landmarks'],
    annotations: {} as HandResult['annotations'],
  };
  if (!input || !models[1] || !config.hand.landmarks) return hand;
  const t: Record<string, Tensor> = {};
  // crop the detected box out of the full frame and normalize pixels to [0..1]
  t.crop = tf.image.cropAndResize(input, [h.yxBox], [0], [inputSize[1], inputSize[1]], 'bilinear');
  t.cast = tf.cast(t.crop, 'float32');
  t.div = tf.div(t.cast, 255);
  [t.score, t.keypoints] = models[1].execute(t.div) as Tensor[];
  // fix: round to two decimals — original computed Math.round(100 * x / 100),
  // which collapses a [0..1] score to 0 or 1 and defeats the minConfidence gate below
  const score = Math.round(100 * (await t.score.data())[0]) / 100;
  if (score > (config.hand.minConfidence || 0)) {
    hand.fingerScore = score;
    t.reshaped = tf.reshape(t.keypoints, [-1, 3]);
    const rawCoords = await t.reshaped.array() as number[][]; // fix: 2-D [n][3], not number[]
    // scale model-space coordinates into input-image space using the detector box;
    // depth (z) is scaled by the mean of box width and height
    hand.keypoints = rawCoords.map((coord) => [
      (h.box[2] * coord[0] / inputSize[1]) + h.box[0],
      (h.box[3] * coord[1] / inputSize[1]) + h.box[1],
      (h.box[2] + h.box[3]) / 2 / inputSize[1] * coord[2],
    ]);
    // h = updateBoxes(h, hand.keypoints); // replace detected box with box calculated around keypoints
    hand.landmarks = fingerPose.analyze(hand.keypoints) as HandResult['landmarks']; // calculate finger landmarks
    for (const key of Object.keys(fingerMap)) { // map keypoints to per-finger annotations
      hand.annotations[key] = fingerMap[key].map((index) => (hand.landmarks && hand.keypoints[index] ? hand.keypoints[index] : null));
    }
  }
  Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
  return hand;
}
// number of hands that had finger landmarks in the last full detection pass
let last = 0;
/**
 * Main entry point: detects hand boxes — reusing cached boxes for skipped
 * frames when permitted — then runs finger-landmark analysis on each box.
 * @param input - input frame tensor (NHWC)
 * @param config - global configuration (uses hand.skipFrames, skipFrame)
 * @returns one HandResult per detected hand
 */
export async function predict(input: Tensor, config: Config): Promise<HandResult[]> {
  outputSize = [input.shape[2] || 0, input.shape[1] || 0];
  // fix: frame skipping is governed by the hand config, not the object-detection config
  if ((skipped < (config.hand.skipFrames || 0)) && config.skipFrame) {
    // use cached boxes
    skipped++;
    const hands: HandResult[] = await Promise.all(boxes.map((hand) => detectFingers(input, hand, config)));
    const withFingers = hands.filter((hand) => hand.fingerScore > 0).length;
    // serve cached result only while landmark coverage matches the last full pass;
    // otherwise fall through and re-run detection
    if (withFingers === last) return hands;
  }
  // calculate new boxes
  skipped = 0;
  boxes = await detectHands(input, config);
  const hands: HandResult[] = await Promise.all(boxes.map((hand) => detectFingers(input, hand, config)));
  const withFingers = hands.filter((hand) => hand.fingerScore > 0).length;
  last = withFingers;
  return hands;
}
/*
<https://victordibia.com/handtrack.js/#/>
<https://github.com/victordibia/handtrack.js/>
<https://github.com/victordibia/handtracking>
<https://medium.com/@victor.dibia/how-to-build-a-real-time-hand-detector-using-neural-networks-ssd-on-tensorflow-d6bac0e4b2ce>
*/
/* TODO
- smart resize
- updateboxes is drifting
*/

View File

@ -11,6 +11,7 @@ import * as face from './face';
import * as facemesh from './blazeface/facemesh';
import * as faceres from './faceres/faceres';
import * as posenet from './posenet/posenet';
import * as handtrack from './handtrack/handtrack';
import * as handpose from './handpose/handpose';
import * as blazepose from './blazepose/blazepose';
import * as efficientpose from './efficientpose/efficientpose';
@ -125,6 +126,7 @@ export class Human {
efficientpose: GraphModel | null,
movenet: GraphModel | null,
handpose: [GraphModel | null, GraphModel | null] | null,
handtrack: [GraphModel | null, GraphModel | null] | null,
age: GraphModel | null,
gender: GraphModel | null,
emotion: GraphModel | null,
@ -188,6 +190,7 @@ export class Human {
this.models = {
face: null, // array of models
handpose: null, // array of models
handtrack: null, // array of models
posenet: null,
blazepose: null,
efficientpose: null,
@ -508,11 +511,13 @@ export class Human {
this.analyze('Start Hand:');
this.state = 'detect:hand';
if (this.config.async) {
handRes = this.config.hand.enabled ? handpose.predict(img.tensor, this.config) : [];
if (this.config.hand.detector?.modelPath?.includes('handdetect')) handRes = this.config.hand.enabled ? handpose.predict(img.tensor, this.config) : [];
else if (this.config.hand.detector?.modelPath?.includes('handtrack')) handRes = this.config.hand.enabled ? handtrack.predict(img.tensor, this.config) : [];
if (this.performance.hand) delete this.performance.hand;
} else {
timeStamp = now();
handRes = this.config.hand.enabled ? await handpose.predict(img.tensor, this.config) : [];
if (this.config.hand.detector?.modelPath?.includes('handdetect')) handRes = this.config.hand.enabled ? await handpose.predict(img.tensor, this.config) : [];
else if (this.config.hand.detector?.modelPath?.includes('handtrack')) handRes = this.config.hand.enabled ? await handtrack.predict(img.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.performance.hand = elapsedTime;
}

View File

@ -59,15 +59,19 @@ export function calc(newResult: Result): Result {
.map((b, j) => ((bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / bufferedFactor)) as [number, number, number, number];
const boxRaw = (newResult.hand[i].boxRaw // update boxRaw
.map((b, j) => ((bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / bufferedFactor)) as [number, number, number, number];
const keypoints = newResult.hand[i].keypoints ? newResult.hand[i].keypoints // update landmarks
if (bufferedResult.hand[i].keypoints.length !== newResult.hand[i].keypoints.length) bufferedResult.hand[i].keypoints = newResult.hand[i].keypoints; // reset keypoints as previous frame did not have them
const keypoints = newResult.hand[i].keypoints && newResult.hand[i].keypoints.length > 0 ? newResult.hand[i].keypoints // update landmarks
.map((landmark, j) => landmark
.map((coord, k) => (((bufferedFactor - 1) * bufferedResult.hand[i].keypoints[j][k] + coord) / bufferedFactor)) as [number, number, number])
: [];
const keys = Object.keys(newResult.hand[i].annotations); // update annotations
const annotations = {};
for (const key of keys) {
annotations[key] = newResult.hand[i].annotations[key]
.map((val, j) => val.map((coord, k) => ((bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / bufferedFactor));
if (Object.keys(bufferedResult.hand[i].annotations).length !== Object.keys(newResult.hand[i].annotations).length) bufferedResult.hand[i].annotations = newResult.hand[i].annotations; // reset annotations as previous frame did not have them
if (newResult.hand[i].annotations) {
for (const key of Object.keys(newResult.hand[i].annotations)) { // update annotations
annotations[key] = newResult.hand[i].annotations[key] && newResult.hand[i].annotations[key][0]
? newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / bufferedFactor))
: null;
}
}
bufferedResult.hand[i] = { ...newResult.hand[i], box, boxRaw, keypoints, annotations: annotations as HandResult['annotations'] }; // shallow clone plus updated values
}

View File

@ -5,6 +5,7 @@ import * as faceres from './faceres/faceres';
import * as emotion from './emotion/emotion';
import * as posenet from './posenet/posenet';
import * as handpose from './handpose/handpose';
import * as handtrack from './handtrack/handtrack';
import * as blazepose from './blazepose/blazepose';
import * as efficientpose from './efficientpose/efficientpose';
import * as movenet from './movenet/movenet';
@ -19,6 +20,7 @@ export function reset(instance) {
instance.models = {
face: null, // array of models
handpose: null, // array of models
handtrack: null, // array of models
posenet: null,
blazepose: null,
efficientpose: null,
@ -42,6 +44,7 @@ export async function load(instance) {
instance.models.face,
instance.models.emotion,
instance.models.handpose,
instance.models.handtrack,
instance.models.posenet,
instance.models.blazepose,
instance.models.efficientpose,
@ -54,7 +57,8 @@ export async function load(instance) {
] = await Promise.all([
instance.models.face || (instance.config.face.enabled ? facemesh.load(instance.config) : null),
instance.models.emotion || ((instance.config.face.enabled && instance.config.face.emotion.enabled) ? emotion.load(instance.config) : null),
instance.models.handpose || (instance.config.hand.enabled ? handpose.load(instance.config) : null),
instance.models.handpose || (instance.config.hand.enabled && instance.config.hand.detector.modelPath.includes('handdetect') ? handpose.load(instance.config) : null),
instance.models.handtrack || (instance.config.hand.enabled && instance.config.hand.detector.modelPath.includes('handtrack') ? handtrack.load(instance.config) : null),
instance.models.posenet || (instance.config.body.enabled && instance.config.body.modelPath.includes('posenet') ? posenet.load(instance.config) : null),
instance.models.blazepose || (instance.config.body.enabled && instance.config.body.modelPath.includes('blazepose') ? blazepose.load(instance.config) : null),
instance.models.efficientpose || (instance.config.body.enabled && instance.config.body.modelPath.includes('efficientpose') ? efficientpose.load(instance.config) : null),
@ -68,7 +72,8 @@ export async function load(instance) {
} else { // load models sequentially
if (instance.config.face.enabled && !instance.models.face) instance.models.face = await facemesh.load(instance.config);
if (instance.config.face.enabled && instance.config.face.emotion.enabled && !instance.models.emotion) instance.models.emotion = await emotion.load(instance.config);
if (instance.config.hand.enabled && !instance.models.handpose) instance.models.handpose = await handpose.load(instance.config);
if (instance.config.hand.enabled && !instance.models.handpose && instance.config.hand.detector.modelPath.includes('handdetect')) instance.models.handpose = await handpose.load(instance.config);
if (instance.config.hand.enabled && !instance.models.handtrack && instance.config.hand.detector.modelPath.includes('handtrack')) instance.models.handtrack = await handtrack.load(instance.config);
if (instance.config.body.enabled && !instance.models.posenet && instance.config.body.modelPath.includes('posenet')) instance.models.posenet = await posenet.load(instance.config);
if (instance.config.body.enabled && !instance.models.blazepose && instance.config.body.modelPath.includes('blazepose')) instance.models.blazepose = await blazepose.load(instance.config);
if (instance.config.body.enabled && !instance.models.efficientpose && instance.config.body.modelPath.includes('efficientpose')) instance.models.efficientpose = await blazepose.load(instance.config);

View File

@ -97,9 +97,12 @@ export interface BodyResult {
export interface HandResult {
id: number,
score: number,
boxScore: number,
fingerScore: number,
box: [number, number, number, number],
boxRaw: [number, number, number, number],
keypoints: Array<[number, number, number]>,
label: string,
annotations: Record<
'index' | 'middle' | 'pinky' | 'ring' | 'thumb' | 'palm',
Array<[number, number, number]>