fix serious performance bug around skipframes

pull/293/head
Vladimir Mandic 2020-11-17 17:42:44 -05:00
parent 9391acd72d
commit 3f180cfc5d
5 changed files with 24 additions and 24 deletions
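The bug: Human passed only the hand sub-config into handpose.load() and estimateHands(), so the hand pipeline saw config.hand as its entire config. Top-level flags such as videoOptimized were therefore undefined inside the pipeline, !undefined evaluated to true, and the skip-frame gate never engaged: the expensive palm-box detector ran on every single frame. The fix passes the full config through and prefixes every hand-specific option with config.hand. A minimal sketch of the failure mode, with the config reduced to the two relevant options (values are hypothetical):

const config = { videoOptimized: true, hand: { skipFrames: 18, landmarks: true } };

function shouldRunDetector(cfg, skipped) { // old gate, called with cfg === config.hand
  // cfg.videoOptimized is undefined here, so the last clause is always true
  return (skipped > cfg.skipFrames) || !cfg.landmarks || !cfg.videoOptimized;
}

function shouldRunDetectorFixed(cfg, skipped) { // new gate, called with the full config
  return (skipped > cfg.hand.skipFrames) || !cfg.hand.landmarks || !cfg.videoOptimized;
}

console.log(shouldRunDetector(config.hand, 3)); // true: detector runs every frame
console.log(shouldRunDetectorFixed(config, 3)); // false: frame is skipped as intended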


@@ -331,7 +331,7 @@ async function processImage(input) {
 // just initialize everything and call main function
 async function detectVideo() {
-  human.config.videoOptimized = true;
+  userConfig.videoOptimized = true;
   document.getElementById('samples-container').style.display = 'none';
   document.getElementById('canvas').style.display = 'block';
   const video = document.getElementById('video');
@@ -353,7 +353,7 @@ async function detectVideo() {
 // just initialize everything and call main function
 async function detectSampleImages() {
   document.getElementById('play').style.display = 'none';
-  human.config.videoOptimized = false;
+  userConfig.videoOptimized = false;
   const size = 12 + Math.trunc(12 * ui.columns * window.innerWidth / document.body.clientWidth);
   ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`);
   ui.baseLineHeight = ui.baseLineHeightProto * ui.columns;

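The demo side of the fix stops mutating human.config directly and records the override on userConfig instead, which the demo hands to human.detect() so it is merged into the effective configuration on each call. A reduced sketch of that pattern (detect() accepting a per-call user config is the library's documented behavior; the wiring here is simplified):

const userConfig = {};

async function runVideoDetection(human, video) {
  userConfig.videoOptimized = true;       // per-call override, human.config stays untouched
  return human.detect(video, userConfig); // merged into the effective config inside detect()
}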

@@ -55,14 +55,14 @@ class HandDetector {
     const rawBoxes = tf.slice(predictions, [0, 1], [-1, 4]);
     const boxes = this.normalizeBoxes(rawBoxes);
     rawBoxes.dispose();
-    const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.maxHands, config.iouThreshold, config.scoreThreshold);
+    const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.hand.maxHands, config.hand.iouThreshold, config.hand.scoreThreshold);
     const filtered = filteredT.arraySync();
     scores.dispose();
     filteredT.dispose();
     const hands = [];
     for (const boxIndex of filtered) {
-      if (scoresVal[boxIndex] >= config.minConfidence) {
+      if (scoresVal[boxIndex] >= config.hand.minConfidence) {
         const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);
         const rawPalmLandmarks = tf.slice(predictions, [boxIndex, 5], [1, 14]);
         const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));
@@ -78,7 +78,7 @@ class HandDetector {
   async estimateHandBounds(input, config) {
     const inputHeight = input.shape[1];
     const inputWidth = input.shape[2];
-    const image = tf.tidy(() => input.resizeBilinear([config.inputSize, config.inputSize]).div(127.5).sub(1));
+    const image = tf.tidy(() => input.resizeBilinear([config.hand.inputSize, config.hand.inputSize]).div(127.5).sub(1));
     const predictions = await this.getBoxes(image, config);
     image.dispose();
     if (!predictions || predictions.length === 0) return null;
@@ -90,7 +90,7 @@ class HandDetector {
       const palmLandmarks = prediction.palmLandmarks.arraySync();
       prediction.box.dispose();
       prediction.palmLandmarks.dispose();
-      hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / config.inputSize, inputHeight / config.inputSize]));
+      hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / config.hand.inputSize, inputHeight / config.hand.inputSize]));
     }
     return hands;
   }

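HandDetector normalizes each frame to the model's square input (now config.hand.inputSize) and maps pixel values from [0, 255] to [-1, 1] via div(127.5).sub(1); detected boxes are then scaled back to source coordinates by width/inputSize and height/inputSize. The same arithmetic on plain numbers (the inputSize value is hypothetical):

const normalize = (px) => px / 127.5 - 1; // what .div(127.5).sub(1) does per pixel
const inputSize = 256;                    // e.g. config.hand.inputSize
const scalePoint = ([x, y], width, height) => [x * (width / inputSize), y * (height / inputSize)];

console.log(normalize(0), normalize(255));      // -1 1
console.log(scalePoint([128, 128], 1280, 720)); // [ 640, 360 ]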

@@ -89,27 +89,27 @@ class HandPipeline {
     // run new detector every skipFrames unless we only want box to start with
     let boxes;
-    if ((this.skipped > config.skipFrames) || !config.landmarks || !config.videoOptimized) {
+    if ((this.skipped > config.hand.skipFrames) || !config.hand.landmarks || !config.videoOptimized) {
       boxes = await this.boxDetector.estimateHandBounds(image, config);
       // don't reset on test image
       if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;
     }
     // if detector result count doesn't match current working set, use it to reset current working set
-    if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.maxHands) || !config.landmarks)) {
+    if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.hand.maxHands) || !config.hand.landmarks)) {
       this.storedBoxes = [];
       this.detectedHands = 0;
       for (const possible of boxes) this.storedBoxes.push(possible);
       if (this.storedBoxes.length > 0) useFreshBox = true;
     }
     const hands = [];
-    // console.log(`skipped: ${this.skipped} max: ${config.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);
+    // console.log(`skipped: ${this.skipped} max: ${config.hand.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);
     // go through working set of boxes
     for (const i in this.storedBoxes) {
       const currentBox = this.storedBoxes[i];
       if (!currentBox) continue;
-      if (config.landmarks) {
+      if (config.hand.landmarks) {
         const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);
         const palmCenter = box.getBoxCenter(currentBox);
         const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];
@@ -124,7 +124,7 @@ class HandPipeline {
         handImage.dispose();
         const confidenceValue = confidence.dataSync()[0];
         confidence.dispose();
-        if (confidenceValue >= config.minConfidence) {
+        if (confidenceValue >= config.hand.minConfidence) {
           const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
           const rawCoords = keypointsReshaped.arraySync();
           keypoints.dispose();

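HandPipeline keeps a working set of boxes across frames and only rebuilds it when a fresh detector pass disagrees with the current track count (or when landmark tracking is off). The reset rule from the hunk above, restated with the class state reduced to a plain object (note && binds tighter than ||, matching the original condition):

function adoptFreshBoxes(state, boxes, cfg) {
  const countChanged = (boxes.length !== state.detectedHands) && (state.detectedHands !== cfg.hand.maxHands);
  if (boxes.length > 0 && (countChanged || !cfg.hand.landmarks)) {
    state.storedBoxes = [...boxes]; // replace the working set with fresh detections
    state.detectedHands = 0;        // the landmark pass will recount
    return true;                    // caller sets useFreshBox
  }
  return false;
}

// e.g. adoptFreshBoxes({ storedBoxes: [], detectedHands: 0 }, [{}], { hand: { maxHands: 2, landmarks: true } }) => true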

@@ -69,16 +69,16 @@ exports.HandPose = HandPose;
 async function load(config) {
   const [handDetectorModel, handPoseModel] = await Promise.all([
-    tf.loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') }),
-    tf.loadGraphModel(config.skeleton.modelPath, { fromTFHub: config.skeleton.modelPath.includes('tfhub.dev') }),
+    tf.loadGraphModel(config.hand.detector.modelPath, { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }),
+    tf.loadGraphModel(config.hand.skeleton.modelPath, { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }),
   ]);
-  const detector = new handdetector.HandDetector(handDetectorModel, config.inputSize, anchors.anchors);
-  const pipe = new pipeline.HandPipeline(detector, handPoseModel, config.inputSize);
+  const detector = new handdetector.HandDetector(handDetectorModel, config.hand.inputSize, anchors.anchors);
+  const pipe = new pipeline.HandPipeline(detector, handPoseModel, config.hand.inputSize);
   const handpose = new HandPose(pipe);
   // eslint-disable-next-line no-console
-  console.log(`Human: load model: ${config.detector.modelPath.match(/\/(.*)\./)[1]}`);
+  console.log(`Human: load model: ${config.hand.detector.modelPath.match(/\/(.*)\./)[1]}`);
   // eslint-disable-next-line no-console
-  console.log(`Human: load model: ${config.skeleton.modelPath.match(/\/(.*)\./)[1]}`);
+  console.log(`Human: load model: ${config.hand.skeleton.modelPath.match(/\/(.*)\./)[1]}`);
   return handpose;
 }
 exports.load = load;

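load() fetches the detector and skeleton graph models concurrently, and flags TF-Hub URLs so tf.loadGraphModel resolves them correctly. A standalone sketch of the same pattern (the helper name is mine; paths come from the config as above):

const tf = require('@tensorflow/tfjs'); // or @tensorflow/tfjs-node on the server side

async function loadHandModels(cfg) {
  const fromHub = (p) => p.includes('tfhub.dev'); // TF-Hub URLs need the fromTFHub flag
  const [detectorModel, skeletonModel] = await Promise.all([
    tf.loadGraphModel(cfg.hand.detector.modelPath, { fromTFHub: fromHub(cfg.hand.detector.modelPath) }),
    tf.loadGraphModel(cfg.hand.skeleton.modelPath, { fromTFHub: fromHub(cfg.hand.skeleton.modelPath) }),
  ]);
  return { detectorModel, skeletonModel };
}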

@@ -132,22 +132,22 @@ class Human {
       this.models.posenet,
       this.models.handpose,
     ] = await Promise.all([
-      this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),
+      this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config) : null),
       this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
       this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
       this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
       this.models.embedding || ((this.config.face.enabled && this.config.face.embedding.enabled) ? embedding.load(this.config) : null),
       this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
-      this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null),
+      this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
     ]);
   } else {
-    if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face);
+    if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config);
     if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
     if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
     if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
     if (this.config.face.enabled && this.config.face.embedding.enabled && !this.models.embedding) this.models.embedding = await embedding.load(this.config);
     if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);
-    if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config.hand);
+    if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config);
   }
   const current = Math.trunc(now() - timeStamp);
   if (current > (this.perf.load || 0)) this.perf.load = current;
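Each model slot follows the same lazy rule: keep the instance if it is already loaded, load it only if its feature is enabled, and in async mode run all loads concurrently through Promise.all. The rule in isolation (the helper and loader stub are mine, not the library's API):

const models = { handpose: null };

async function ensureModel(slot, enabled, loader) {
  if (enabled && !models[slot]) models[slot] = await loader(); // load once, on demand
  return models[slot]; // stays null while the feature is disabled
}

// usage: await ensureModel('handpose', config.hand.enabled, () => handpose.load(config));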
@@ -207,7 +207,7 @@ class Human {
     const faceRes = [];
     this.state = 'run:face';
     timeStamp = now();
-    const faces = await this.models.facemesh.estimateFaces(input, this.config.face);
+    const faces = await this.models.facemesh.estimateFaces(input, this.config);
     this.perf.face = Math.trunc(now() - timeStamp);
     for (const face of faces) {
       this.analyze('Get Face');
@@ -378,12 +378,12 @@ class Human {
     // run handpose
     this.analyze('Start Hand:');
     if (this.config.async) {
-      handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];
+      handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config) : [];
       if (this.perf.hand) delete this.perf.hand;
     } else {
       this.state = 'run:hand';
       timeStamp = now();
-      handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];
+      handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(process.tensor, this.config) : [];
       this.perf.hand = Math.trunc(now() - timeStamp);
     }
     // this.analyze('End Hand:');
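In async mode the hand promise is deliberately left un-awaited at this point so hand detection overlaps the other detectors, and the per-phase timer is dropped because the phases interleave; in sync mode each phase is awaited and timed individually. The two scheduling modes reduced to stubs (names and timings are hypothetical):

const phase = (name, ms) => new Promise((resolve) => setTimeout(() => resolve(name), ms));

async function detectSync() { // phases run back to back and can be timed individually
  const face = await phase('face', 30);
  const hand = await phase('hand', 20);
  return { face, hand };
}

async function detectAsync() { // phases overlap; only total time is meaningful
  const [face, hand] = await Promise.all([phase('face', 30), phase('hand', 20)]);
  return { face, hand };
}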