update hand detector processing algorithm

pull/356/head
Vladimir Mandic 2021-09-02 08:50:16 -04:00
parent d5abaf2405
commit 296501cbf8
10 changed files with 46 additions and 42 deletions


@@ -11,9 +11,8 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
 ### **HEAD -> main** 2021/08/31 mandic00@live.com
-- simplify canvas handling in nodejs
-### **origin/main** 2021/08/31 mandic00@live.com
+- full rebuild
 ### **2.1.5** 2021/08/31 mandic00@live.com


@@ -35,7 +35,7 @@ let userConfig = {
   /*
   wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.9.0/dist/',
   async: false,
-  cacheSensitivity: 0,
+  cacheSensitivity: 0.75,
   filter: {
     enabled: false,
     flip: false,
@@ -49,11 +49,12 @@ let userConfig = {
   },
   object: { enabled: false },
   gesture: { enabled: true },
-  hand: { enabled: false },
+  hand: { enabled: true },
   body: { enabled: false },
   // body: { enabled: true, modelPath: 'movenet-multipose.json' },
   // body: { enabled: true, modelPath: 'posenet.json' },
   segmentation: { enabled: false },
+  /*
   */
 };
@@ -82,6 +83,7 @@ const ui = {
   buffered: true, // should output be buffered between frames
   interpolated: true, // should output be interpolated for smoothness between frames
   iconSize: '48px', // ui icon sizes
+  autoPlay: false, // start webcam & detection on load
   // internal variables
   busy: false, // internal camera busy flag
@@ -375,9 +377,9 @@ async function setupCamera() {
       canvas.height = video.videoHeight;
       ui.menuWidth.input.setAttribute('value', video.videoWidth);
       ui.menuHeight.input.setAttribute('value', video.videoHeight);
-      if (live) video.play();
+      if (live || ui.autoPlay) video.play();
       // eslint-disable-next-line no-use-before-define
-      if (live && !ui.detectThread) runHumanDetect(video, canvas);
+      if ((live || ui.autoPlay) && !ui.detectThread) runHumanDetect(video, canvas);
       ui.busy = false;
       resolve();
     };
@@ -936,6 +938,10 @@ async function main() {
     ui.bench = JSON.parse(params.get('bench'));
     log('overriding bench:', ui.bench);
   }
+  if (params.has('play')) {
+    ui.autoPlay = true;
+    log('overriding autoplay:', true);
+  }
   if (params.has('draw')) {
     ui.drawWarmup = JSON.parse(params.get('draw'));
     log('overriding drawWarmup:', ui.drawWarmup);
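
For context, these overrides are read from the demo page query string, so the new flag can be exercised by loading the demo with ?play appended to the URL. A minimal standalone sketch of the pattern (URLSearchParams is the standard browser API; the ui object here is a stand-in for the demo options object above):

// sketch: query-string overrides for demo ui options (browser context assumed)
const ui = { autoPlay: false, bench: false, drawWarmup: false }; // stand-in for the real options object
const params = new URLSearchParams(window.location.search);
if (params.has('play')) ui.autoPlay = true; // presence alone enables autoplay, e.g. index.html?play
if (params.has('bench')) ui.bench = JSON.parse(params.get('bench') as string); // e.g. ?bench=true
if (params.has('draw')) ui.drawWarmup = JSON.parse(params.get('draw') as string); // e.g. ?draw=false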


@@ -66,7 +66,7 @@
     "@tensorflow/tfjs-layers": "^3.9.0",
     "@tensorflow/tfjs-node": "^3.9.0",
     "@tensorflow/tfjs-node-gpu": "^3.9.0",
-    "@types/node": "^16.7.8",
+    "@types/node": "^16.7.10",
     "@typescript-eslint/eslint-plugin": "^4.30.0",
     "@typescript-eslint/parser": "^4.30.0",
     "@vladmandic/pilogger": "^0.2.18",


@@ -331,9 +331,9 @@ const config: Config = {
     // e.g., if model is running at 25 FPS, we can re-use existing bounding
     // box for updated hand skeleton analysis as the hand probably
     // hasn't moved much in short time (10 * 1/25 = 0.4 sec)
-    minConfidence: 0.1, // threshold for discarding a prediction
-    iouThreshold: 0.1, // amount of overlap between two detected objects before one object is removed
-    maxDetected: 2, // maximum number of hands detected in the input
+    minConfidence: 0.8, // threshold for discarding a prediction
+    iouThreshold: 0.2, // amount of overlap between two detected objects before one object is removed
+    maxDetected: 1, // maximum number of hands detected in the input
     // should be set to the minimum number for performance
     landmarks: true, // detect hand landmarks or just hand boundary box
     detector: {
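
Since all three of these thresholds feed non-max suppression in the updated detector, it may help to recall what iouThreshold gates. Below is a minimal intersection-over-union computation for two [x, y, width, height] boxes, illustrative only; in Human the suppression itself runs inside TF.js via tf.image.nonMaxSuppressionAsync:

// sketch: intersection-over-union of two boxes in [x, y, width, height] form;
// a candidate whose IoU with an already-kept box exceeds iouThreshold (now 0.2) is dropped
type Box = [number, number, number, number];
function iou(a: Box, b: Box): number {
  const x1 = Math.max(a[0], b[0]);
  const y1 = Math.max(a[1], b[1]);
  const x2 = Math.min(a[0] + a[2], b[0] + b[2]);
  const y2 = Math.min(a[1] + a[3], b[1] + b[3]);
  const inter = Math.max(0, x2 - x1) * Math.max(0, y2 - y1); // overlap area, 0 if disjoint
  const union = a[2] * a[3] + b[2] * b[3] - inter;
  return union > 0 ? inter / union : 0;
}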


@@ -407,6 +407,7 @@ export async function hand(inCanvas: HTMLCanvasElement, result: Array<Hand>, dra
       }
       if (localOptions.drawLabels) {
         const addHandLabel = (part, title) => {
+          if (!part) return;
           ctx.fillStyle = localOptions.useDepth ? `rgba(${127.5 + (2 * part[part.length - 1][2])}, ${127.5 - (2 * part[part.length - 1][2])}, 255, 0.5)` : localOptions.color;
           ctx.fillText(title, part[part.length - 1][0] + 4, part[part.length - 1][1] + 4);
         };


@@ -167,6 +167,11 @@ export function estimate(landmarks) {
   // step 1: calculate slopes
   const slopesXY: Array<number[]> = [];
   const slopesYZ: Array<number[]> = [];
+  const fingerCurls: Array<number> = [];
+  const fingerDirections: Array<number> = [];
+  if (!landmarks) return { curls: fingerCurls, directions: fingerDirections };
+  // step 1: calculate slopes
   for (const finger of Finger.all) {
     const points = Finger.getPoints(finger);
     const slopeAtXY: Array<number> = [];
@@ -186,8 +191,6 @@
   }
   // step 2: calculate orientations
-  const fingerCurls: Array<number> = [];
-  const fingerDirections: Array<number> = [];
   for (const finger of Finger.all) {
     // start finger predictions from palm - except for thumb
     const pointIndexAt = (finger === Finger.thumb) ? 1 : 0;


@@ -40,31 +40,23 @@ export class HandDetector {
   }
   async getBoxes(input, config) {
-    const batched = this.model.predict(input) as Tensor;
-    const predictions = tf.squeeze(batched);
-    tf.dispose(batched);
-    const scoresT = tf.tidy(() => tf.squeeze(tf.sigmoid(tf.slice(predictions, [0, 0], [-1, 1]))));
-    const scores = await scoresT.data();
-    const rawBoxes = tf.slice(predictions, [0, 1], [-1, 4]);
-    const boxes = this.normalizeBoxes(rawBoxes);
-    tf.dispose(rawBoxes);
-    const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.hand.maxDetected, config.hand.iouThreshold, config.hand.minConfidence);
-    const filtered = await filteredT.array();
-    tf.dispose(scoresT);
-    tf.dispose(filteredT);
+    const t: Record<string, Tensor> = {};
+    t.batched = this.model.predict(input) as Tensor;
+    t.predictions = tf.squeeze(t.batched);
+    t.scores = tf.tidy(() => tf.squeeze(tf.sigmoid(tf.slice(t.predictions, [0, 0], [-1, 1]))));
+    const scores = await t.scores.data();
+    t.boxes = tf.slice(t.predictions, [0, 1], [-1, 4]);
+    t.norm = this.normalizeBoxes(t.boxes);
+    t.nms = await tf.image.nonMaxSuppressionAsync(t.norm, t.scores, 10 * config.hand.maxDetected, config.hand.iouThreshold, config.hand.minConfidence);
+    const nms = await t.nms.array() as Array<number>;
     const hands: Array<{ box: Tensor, palmLandmarks: Tensor, confidence: number }> = [];
-    for (const index of filtered) {
-      if (scores[index] >= config.hand.minConfidence) {
-        const matchingBox = tf.slice(boxes, [index, 0], [1, -1]);
-        const rawPalmLandmarks = tf.slice(predictions, [index, 5], [1, 14]);
-        const palmLandmarks = tf.tidy(() => tf.reshape(this.normalizeLandmarks(rawPalmLandmarks, index), [-1, 2]));
-        tf.dispose(rawPalmLandmarks);
-        hands.push({ box: matchingBox, palmLandmarks, confidence: scores[index] });
-      }
-    }
-    tf.dispose(predictions);
-    tf.dispose(boxes);
+    for (const index of nms) {
+      const palmBox = tf.slice(t.norm, [index, 0], [1, -1]);
+      const palmLandmarks = tf.tidy(() => tf.reshape(this.normalizeLandmarks(tf.slice(t.predictions, [index, 5], [1, 14]), index), [-1, 2]));
+      hands.push({ box: palmBox, palmLandmarks, confidence: scores[index] });
+      // console.log('handdetector:getBoxes', nms.length, index, scores[index], config.hand.maxDetected, config.hand.iouThreshold, config.hand.minConfidence, palmBox.dataSync());
+    }
+    for (const tensor of Object.keys(t)) tf.dispose(t[tensor]); // dispose all
     return hands;
   }
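
The rewrite above replaces scattered tf.dispose() calls with a single record of named intermediate tensors, disposed in one pass at the end, so a missed dispose can no longer leak an intermediate. It also asks non-max suppression for 10 * maxDetected candidates and lets the NMS score threshold do the confidence filtering that was previously re-checked per index. A minimal standalone sketch of the disposal pattern, assuming @tensorflow/tfjs (the function and variable names here are illustrative, not from Human):

import * as tf from '@tensorflow/tfjs';
import type { Tensor } from '@tensorflow/tfjs';

// sketch: track every intermediate tensor in one keyed record, dispose all at the end
async function sumOfSquares(input: Tensor): Promise<number> {
  const t: Record<string, Tensor> = {};
  t.squared = tf.square(input);
  t.sum = tf.sum(t.squared);
  const value = (await t.sum.data())[0]; // read the result out before disposing
  for (const tensor of Object.keys(t)) tf.dispose(t[tensor]); // dispose all
  return value;
}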


@@ -85,7 +85,7 @@ export class HandPipeline {
     // run new detector every skipFrames unless we only want box to start with
     let boxes;
-    // console.log(this.skipped, config.hand.skipFrames, !config.hand.landmarks, !config.skipFrame);
+    // console.log('handpipeline:estimateHands:skip criteria', this.skipped, config.hand.skipFrames, !config.hand.landmarks, !config.skipFrame); // should skip hand detector?
     if ((this.skipped === 0) || (this.skipped > config.hand.skipFrames) || !config.hand.landmarks || !config.skipFrame) {
       boxes = await this.handDetector.estimateHandBounds(image, config);
       this.skipped = 0;
@@ -120,7 +120,7 @@
       tf.dispose(handImage);
       const confidence = (await confidenceT.data())[0];
       tf.dispose(confidenceT);
-      if (confidence >= config.hand.minConfidence) {
+      if (confidence >= config.hand.minConfidence / 4) {
         const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
         const rawCoords = await keypointsReshaped.array();
         tf.dispose(keypoints);
@@ -135,6 +135,7 @@
         };
         hands.push(result);
       } else {
+        // console.log('handpipeline:estimateHands low', confidence);
         this.storedBoxes[i] = null;
       }
       tf.dispose(keypoints);
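
For reference, the skip logic referenced in the comments above runs the (expensive) palm detector only when the cache cannot be used; otherwise previously stored boxes feed the landmark model directly, and the relaxed confidence check (minConfidence / 4) decides whether a cached box survives to the next frame. A condensed sketch of the detector-skip criteria, with names mirroring the pipeline code:

// sketch: decide whether to rerun the palm detector or reuse cached boxes
// `skipped` counts frames since the detector last ran (as in HandPipeline)
function shouldRunDetector(skipped: number, skipFrames: number, landmarks: boolean, skipFrame: boolean): boolean {
  return (skipped === 0)       // no cached detection yet
    || (skipped > skipFrames)  // cache is older than the configured window
    || !landmarks              // boundary-box-only mode: no landmark model to refine cached boxes
    || !skipFrame;             // global frame-change cache says the input changed
}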


@@ -148,8 +148,8 @@ export class Human {
    * @param userConfig: {@link Config}
    */
   constructor(userConfig?: Config | Record<string, unknown>) {
-    Human.version = app.version;
-    Object.defineProperty(this, 'version', { value: app.version });
+    Human.version = app.version; // expose version property directly on class itself
+    Object.defineProperty(this, 'version', { value: app.version }); // expose version property on instance of class
     defaults.wasmPath = `https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tf.version_core}/dist/`;
     this.config = mergeDeep(defaults, userConfig || {});
     this.tf = tf;
@@ -427,6 +427,7 @@
     const skipFrame = diff < Math.max(this.config.cacheSensitivity, this.#lastCacheDiff);
     // if difference is above 10x threshold, don't use last value to force reset cache for significant change of scenes or images
     this.#lastCacheDiff = diff > 10 * this.config.cacheSensitivity ? 0 : diff;
+    // console.log('skipFrame', skipFrame, this.config.cacheSensitivity, diff);
     return skipFrame;
   }
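
The logic above, in isolation: a frame is skipped when the inter-frame pixel difference stays below cacheSensitivity (or below the last stored difference), and a spike above 10x the threshold clears the stored value so the cache resets on a scene change. A standalone sketch with the private class field flattened into a module-level variable:

// sketch: adaptive frame-skip decision driven by inter-frame pixel difference
let lastCacheDiff = 0;
function skipFrame(diff: number, cacheSensitivity: number): boolean {
  const skip = diff < Math.max(cacheSensitivity, lastCacheDiff);
  // a >10x spike means a scene change: drop the stored floor so caching restarts cleanly
  lastCacheDiff = diff > 10 * cacheSensitivity ? 0 : diff;
  return skip;
}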


@@ -59,9 +59,10 @@ export function calc(newResult: Result): Result {
         .map((b, j) => ((bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / bufferedFactor)) as [number, number, number, number];
       const boxRaw = (newResult.hand[i].boxRaw // update boxRaw
         .map((b, j) => ((bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / bufferedFactor)) as [number, number, number, number];
-      const keypoints = newResult.hand[i].keypoints // update landmarks
+      const keypoints = newResult.hand[i].keypoints ? newResult.hand[i].keypoints // update landmarks
         .map((landmark, j) => landmark
-          .map((coord, k) => (((bufferedFactor - 1) * bufferedResult.hand[i].keypoints[j][k] + coord) / bufferedFactor)) as [number, number, number]);
+          .map((coord, k) => (((bufferedFactor - 1) * bufferedResult.hand[i].keypoints[j][k] + coord) / bufferedFactor)) as [number, number, number])
+        : [];
       const keys = Object.keys(newResult.hand[i].annotations); // update annotations
       const annotations = {};
       for (const key of keys) {
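
All of the .map() chains above apply the same running average, smoothed = ((bufferedFactor - 1) * previous + current) / bufferedFactor, to box coordinates and keypoints alike; the change here just falls back to an empty keypoints array when a hand was detected without landmarks. A scalar sketch of the smoothing step:

// sketch: the exponential smoothing used for interpolated results;
// with bufferedFactor = 3, a jump from 100 to 130 yields (2 * 100 + 130) / 3 = 110
function smooth(previous: number, current: number, bufferedFactor: number): number {
  return ((bufferedFactor - 1) * previous + current) / bufferedFactor;
}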