conditional hand rotation

pull/293/head
Vladimir Mandic 2020-12-10 15:46:45 -05:00
parent 866c2f0d0f
commit 565a8b116a
6 changed files with 40 additions and 28 deletions

View File

@ -140,6 +140,8 @@ export default {
hand: {
enabled: true,
rotation: false, // use best-guess rotated hand image, or use the detected box as-is
// false is faster, but finger mapping may be incorrect if the hand is inverted
inputSize: 256, // fixed value
skipFrames: 19, // how many frames to go without re-running the hand bounding box detector
// only used for video inputs
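
For context, a minimal sketch of how a user could opt into the new flag through a configuration override, mirroring the `new Human(userConfig)` pattern used in the demo below (the import path is illustrative):

import Human from '../dist/human.esm.js'; // path depends on your setup
// enable best-guess hand rotation: slower, but finger mapping stays
// correct even when the hand is inverted
const human = new Human({
  hand: { enabled: true, rotation: true },
});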

View File

@ -3,10 +3,14 @@ import draw from './draw.js';
import Menu from './menu.js';
import GLBench from './gl-bench.js';
// const userConfig = {}; // add any user configuration overrides
const userConfig = {}; // add any user configuration overrides
/*
const userConfig = {
async: false,
face: { enabled: false },
body: { enabled: false },
};
*/
const human = new Human(userConfig);
@ -33,7 +37,7 @@ const ui = {
console: true,
maxFPSframes: 10,
modelsPreload: true,
modelsWarmup: false,
modelsWarmup: true,
menuWidth: 0,
menuHeight: 0,
camera: {},
@ -44,7 +48,7 @@ const ui = {
detectThread: null,
framesDraw: 0,
framesDetect: 0,
bench: true,
bench: false,
};
// global variables
@ -471,6 +475,10 @@ function setupMenu() {
human.config.face.detector.iouThreshold = parseFloat(val);
human.config.hand.iouThreshold = parseFloat(val);
});
menu.process.addBool('detection rotation', human.config.face.detector, 'rotation', (val) => {
human.config.face.detector.rotation = val;
human.config.hand.rotation = val;
});
menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
menu.process.addButton('process sample images', 'process images', () => detectSampleImages());
menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
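
The callback keeps the face and hand rotation flags in sync from a single switch. The same `addBool(label, object, property, callback)` signature can drive other boolean options; a hypothetical example (the label and bound property here are illustrative, not part of this commit):

menu.process.addBool('hand landmarks', human.config.hand, 'landmarks', (val) => {
  human.config.hand.landmarks = val;
});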

View File

@ -18,11 +18,13 @@
import * as tf from '../../dist/tfjs.esm.js';
import * as box from './box';
import * as util from './util';
// eslint-disable-next-line no-unused-vars
import { log } from '../log.js';
const PALM_BOX_SHIFT_VECTOR = [0, -0.4];
const PALM_BOX_ENLARGE_FACTOR = 3;
const HAND_BOX_SHIFT_VECTOR = [0, -0.1]; // move detected hand box by x,y to ease landmark detection
const HAND_BOX_ENLARGE_FACTOR = 1.65; // increased from model default 1.65;
// const PALM_BOX_SHIFT_VECTOR = [0, -0.4];
const PALM_BOX_ENLARGE_FACTOR = 5; // default 3
// const HAND_BOX_SHIFT_VECTOR = [0, -0.1]; // move detected hand box by x,y to ease landmark detection
const HAND_BOX_ENLARGE_FACTOR = 1.65; // default 1.65
const PALM_LANDMARK_IDS = [0, 5, 9, 13, 17, 1, 2];
const PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;
const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;
@ -38,22 +40,20 @@ class HandPipeline {
}
getBoxForPalmLandmarks(palmLandmarks, rotationMatrix) {
const rotatedPalmLandmarks = palmLandmarks.map((coord) => {
const homogeneousCoordinate = [...coord, 1];
return util.rotatePoint(homogeneousCoordinate, rotationMatrix);
});
const rotatedPalmLandmarks = palmLandmarks.map((coord) => util.rotatePoint([...coord, 1], rotationMatrix));
const boxAroundPalm = this.calculateLandmarksBoundingBox(rotatedPalmLandmarks);
return box.enlargeBox(box.squarifyBox(box.shiftBox(boxAroundPalm, PALM_BOX_SHIFT_VECTOR)), PALM_BOX_ENLARGE_FACTOR);
// return box.enlargeBox(box.squarifyBox(box.shiftBox(boxAroundPalm, PALM_BOX_SHIFT_VECTOR)), PALM_BOX_ENLARGE_FACTOR);
return box.enlargeBox(box.squarifyBox(boxAroundPalm), PALM_BOX_ENLARGE_FACTOR);
}
getBoxForHandLandmarks(landmarks) {
const boundingBox = this.calculateLandmarksBoundingBox(landmarks);
const boxAroundHand = box.enlargeBox(box.squarifyBox(box.shiftBox(boundingBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);
const palmLandmarks = [];
// const boxAroundHand = box.enlargeBox(box.squarifyBox(box.shiftBox(boundingBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);
const boxAroundHand = box.enlargeBox(box.squarifyBox(boundingBox), HAND_BOX_ENLARGE_FACTOR);
boxAroundHand.palmLandmarks = [];
for (let i = 0; i < PALM_LANDMARK_IDS.length; i++) {
palmLandmarks.push(landmarks[PALM_LANDMARK_IDS[i]].slice(0, 2));
boxAroundHand.palmLandmarks.push(landmarks[PALM_LANDMARK_IDS[i]].slice(0, 2));
}
boxAroundHand.palmLandmarks = palmLandmarks;
return boxAroundHand;
}
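
With the shift step removed, the crop is driven entirely by `squarifyBox` and `enlargeBox`. A minimal sketch of their assumed semantics, inferred from the names and typical MediaPipe hand-tracking code (not the library's actual `box.js` implementation):

// assumed box shape: { startPoint: [x, y], endPoint: [x, y] }
function squarifyBox(b) {
  // grow the shorter side so the box becomes a square around the same center
  const width = b.endPoint[0] - b.startPoint[0];
  const height = b.endPoint[1] - b.startPoint[1];
  const half = Math.max(width, height) / 2;
  const cx = b.startPoint[0] + width / 2;
  const cy = b.startPoint[1] + height / 2;
  return { startPoint: [cx - half, cy - half], endPoint: [cx + half, cy + half] };
}
function enlargeBox(b, factor) {
  // scale the box around its center by the given factor
  const cx = (b.startPoint[0] + b.endPoint[0]) / 2;
  const cy = (b.startPoint[1] + b.endPoint[1]) / 2;
  const hw = ((b.endPoint[0] - b.startPoint[0]) / 2) * factor;
  const hh = ((b.endPoint[1] - b.startPoint[1]) / 2) * factor;
  return { startPoint: [cx - hw, cy - hh], endPoint: [cx + hw, cy + hh] };
}
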
@ -110,10 +110,10 @@ class HandPipeline {
const currentBox = this.storedBoxes[i];
if (!currentBox) continue;
if (config.hand.landmarks) {
const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
const angle = config.hand.rotation ? util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]) : 0;
const palmCenter = box.getBoxCenter(currentBox);
const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];
const rotatedImage = tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized);
const rotatedImage = config.hand.rotation ? tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized) : image.clone();
const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = box.cutBoxFromImageAndResize(newBox, rotatedImage, [this.inputSize, this.inputSize]);
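
This is the heart of the change: when `config.hand.rotation` is false the angle collapses to 0, `buildRotationMatrix(-0, palmCenter)` degenerates to the identity transform, and the expensive `tf.image.rotateWithOffset` pass is replaced by a cheap `clone()`. A self-contained sketch of the guard (dummy tensor and values; assumes a tfjs import):

import * as tf from '@tensorflow/tfjs';

function maybeRotate(image, angle, centerNormalized, useRotation) {
  // rotation off: return a cheap clone instead of running a rotate kernel
  return useRotation
    ? tf.image.rotateWithOffset(image, angle, 0, centerNormalized)
    : image.clone();
}

const image = tf.zeros([1, 256, 256, 3]); // dummy frame [batch, h, w, channels]
const out = maybeRotate(image, 0.5, [0.5, 0.5], false); // rotation disabled: plain clone
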
@ -146,7 +146,8 @@ class HandPipeline {
}
keypoints.dispose();
} else {
const enlarged = box.enlargeBox(box.squarifyBox(box.shiftBox(currentBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);
// const enlarged = box.enlargeBox(box.squarifyBox(box.shiftBox(currentBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);
const enlarged = box.enlargeBox(box.squarifyBox(currentBox), HAND_BOX_ENLARGE_FACTOR);
const result = {
confidence: currentBox.confidence,
box: {

View File

@ -19,7 +19,7 @@
import { log } from '../log.js';
import * as tf from '../../dist/tfjs.esm.js';
import * as handdetector from './handdetector';
import * as pipeline from './handpipeline';
import * as handpipeline from './handpipeline';
import * as anchors from './anchors';
const MESH_ANNOTATIONS = {
@ -32,8 +32,8 @@ const MESH_ANNOTATIONS = {
};
class HandPose {
constructor(pipe) {
this.pipeline = pipe;
constructor(handPipeline) {
this.handPipeline = handPipeline;
}
static getAnnotations() {
@ -41,7 +41,7 @@ class HandPose {
}
async estimateHands(input, config) {
const predictions = await this.pipeline.estimateHands(input, config);
const predictions = await this.handPipeline.estimateHands(input, config);
if (!predictions) return [];
const hands = [];
for (const prediction of predictions) {
@ -74,11 +74,11 @@ async function load(config) {
config.hand.enabled ? tf.loadGraphModel(config.hand.detector.modelPath, { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
config.hand.landmarks ? tf.loadGraphModel(config.hand.skeleton.modelPath, { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) : null,
]);
const detector = new handdetector.HandDetector(handDetectorModel, config.hand.inputSize, anchors.anchors);
const pipe = new pipeline.HandPipeline(detector, handPoseModel, config.hand.inputSize);
const handpose = new HandPose(pipe);
const handDetector = new handdetector.HandDetector(handDetectorModel, config.hand.inputSize, anchors.anchors);
const handPipeline = new handpipeline.HandPipeline(handDetector, handPoseModel, config.hand.inputSize);
const handPose = new HandPose(handPipeline);
if (config.hand.enabled) log(`load model: ${config.hand.detector.modelPath.match(/\/(.*)\./)[1]}`);
if (config.hand.landmarks) log(`load model: ${config.hand.skeleton.modelPath.match(/\/(.*)\./)[1]}`);
return handpose;
return handPose;
}
exports.load = load;
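
A hypothetical usage of the renamed exports, assuming this module is imported as `handpose` and that a populated `config` object and an `input` image tensor or media element are available:

const handPose = await handpose.load(config); // loads detector and skeleton models
const hands = await handPose.estimateHands(input, config); // [] when no hands are found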

View File

@ -120,6 +120,7 @@ class Human {
}
this.firstRun = false;
}
if (this.config.async) {
[
this.models.facemesh,

wiki

@ -1 +1 @@
Subproject commit 60eb01217f8d3e69055c991991183dd295cf3766
Subproject commit 640fbd9a107c52692bfaaede0d751c5572cf7f22