convert blazeface to module

pull/293/head
Vladimir Mandic 2021-04-25 16:56:10 -04:00
parent 92930efb65
commit 66b7272987
9 changed files with 5950 additions and 11845 deletions


@ -11,10 +11,16 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
### **HEAD -> main** 2021/04/25 mandic00@live.com
- build nodejs deliverables in non-minified form
- stop building sourcemaps for nodejs deliverables
- remove deallocate, profile, scoped
- replaced maxfaces, maxdetections, maxhands, maxresults with maxdetected
- replaced nmsradius with built-in default
- unified minconfidence and scorethreshold as minconfidence
- add exception handlers to all demos
- remove blazeface-front and add unhandledrejection handler
- major update for 1.8 release candidate
### **origin/main** 2021/04/25 mandic00@live.com
- enable webworker detection
### **1.7.1** 2021/04/25 mandic00@live.com


@ -6,8 +6,7 @@ N/A
## Exploring Features
- Implement built-in input handler for `http:`, `https:`, `file:`
- Canvas.js for WASM on NodeJS
N/A
## Explore Models


@ -7,10 +7,9 @@ import Menu from './helpers/menu.js';
import GLBench from './helpers/gl-bench.js';
import webRTC from './helpers/webrtc.js';
const userConfig = {};
// const userConfig = {};
let human;
/*
const userConfig = {
backend: 'humangl',
async: false,
@ -21,7 +20,7 @@ const userConfig = {
enabled: false,
flip: false,
},
face: { enabled: false,
face: { enabled: true,
mesh: { enabled: true },
iris: { enabled: true },
description: { enabled: false },
@ -29,11 +28,10 @@ const userConfig = {
},
hand: { enabled: false },
gesture: { enabled: false },
body: { enabled: true, modelPath: 'posenet.json' },
body: { enabled: false, modelPath: 'posenet.json' },
// body: { enabled: true, modelPath: 'blazepose.json' },
// object: { enabled: true },
};
*/
// ui options
const ui = {
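The demo now starts from an empty `userConfig`, keeping the detailed overrides as a comment block for reference. A minimal sketch of how such overrides reach the library, assuming the `Human` constructor accepts a partial config and merges it over the defaults (the import path mirrors the demo's other imports):

```ts
// minimal sketch, assuming Human merges a partial config over its defaults;
// option names are taken from the commented-out block above
import Human from '../dist/human.esm.js';

const userConfig = {
  backend: 'humangl',
  async: false,
  face: { enabled: true, mesh: { enabled: true }, iris: { enabled: true } },
  hand: { enabled: false },
  body: { enabled: false, modelPath: 'posenet.json' },
};

const human = new Human(userConfig); // unspecified options keep their defaults
```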


@ -1,5 +1,6 @@
import { log, now } from './helpers';
import * as tf from '../dist/tfjs.esm.js';
import * as facemesh from './blazeface/facemesh';
import * as emotion from './emotion/emotion';
import * as faceres from './faceres/faceres';
@ -130,7 +131,7 @@ export const detectFace = async (parent, input): Promise<any> => {
}> = [];
parent.state = 'run:face';
timeStamp = now();
const faces = await parent.models.face?.estimateFaces(input, parent.config);
const faces = await facemesh.predict(input, parent.config);
parent.perf.face = Math.trunc(now() - timeStamp);
if (!faces) return [];
for (const face of faces) {
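`detectFace` now calls the module directly instead of a method on a cached wrapper object. A simplified sketch of the module pattern this commit converts blazeface/facemesh to; the lazy-load guard is an assumption, not the actual facemesh internals:

```ts
// simplified sketch of the module pattern: the module owns its model state,
// so callers need no wrapper class and no per-instance bookkeeping
import * as tf from '../dist/tfjs.esm.js';

let model; // cached across calls, loaded once

export async function load(config) {
  if (!model) model = await tf.loadGraphModel(config.face.detector.modelPath);
  return model;
}

export async function predict(input, config) {
  if (!model) await load(config); // assumed lazy-load guard
  // run inference with the cached model and return face predictions
}
```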

File diff suppressed because it is too large


@ -1,5 +1,6 @@
import * as tf from '../../dist/tfjs.esm.js';
import * as box from './box';
import * as anchors from './anchors';
export class HandDetector {
model: any;
@ -9,13 +10,13 @@ export class HandDetector {
inputSizeTensor: any;
doubleInputSizeTensor: any;
constructor(model, inputSize, anchorsAnnotated) {
constructor(model) {
this.model = model;
this.anchors = anchorsAnnotated.map((anchor) => [anchor.x_center, anchor.y_center]);
this.anchors = anchors.anchors.map((anchor) => [anchor.x, anchor.y]);
this.anchorsTensor = tf.tensor2d(this.anchors);
this.inputSize = inputSize;
this.inputSizeTensor = tf.tensor1d([inputSize, inputSize]);
this.doubleInputSizeTensor = tf.tensor1d([inputSize * 2, inputSize * 2]);
this.inputSize = this.model?.inputs[0].shape[2];
this.inputSizeTensor = tf.tensor1d([this.inputSize, this.inputSize]);
this.doubleInputSizeTensor = tf.tensor1d([this.inputSize * 2, this.inputSize * 2]);
}
normalizeBoxes(boxes) {
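`HandDetector` no longer takes `inputSize` and an annotated anchor list from the caller: anchors come from the new `anchors` module, and the input size is read off the loaded model itself. A short sketch of that derivation, assuming the graph model expects an NHWC tensor of shape `[1, height, width, 3]`:

```ts
// sketch: derive the square input size from the model's declared input shape
// (index 2 is the width under the assumed [1, height, width, 3] layout)
const inputSize = model?.inputs[0].shape[2];
const inputSizeTensor = tf.tensor1d([inputSize, inputSize]);
```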


@ -18,10 +18,10 @@ export class HandPipeline {
skipped: number;
detectedHands: number;
constructor(handDetector, landmarkDetector, inputSize) {
constructor(handDetector, landmarkDetector) {
this.handDetector = handDetector;
this.landmarkDetector = landmarkDetector;
this.inputSize = inputSize;
this.inputSize = this.landmarkDetector?.inputs[0].shape[2];
this.storedBoxes = [];
this.skipped = 0;
this.detectedHands = 0;
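`HandPipeline` gets the same treatment: the landmark model's own input shape replaces the `inputSize` argument, so wiring the two stages needs nothing beyond the models themselves, as the sketch below shows (constructor signatures are the ones introduced in this commit):

```ts
// wiring sketch with the simplified constructors
const detector = new handdetector.HandDetector(handDetectorModel);
const pipeline = new handpipeline.HandPipeline(detector, handPoseModel);
// each stage now reads its input size from its own model's input shape
```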


@ -2,9 +2,8 @@ import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import * as handdetector from './handdetector';
import * as handpipeline from './handpipeline';
import * as anchors from './anchors';
const MESH_ANNOTATIONS = {
const meshAnnotations = {
thumb: [1, 2, 3, 4],
indexFinger: [5, 6, 7, 8],
middleFinger: [9, 10, 11, 12],
@ -13,49 +12,39 @@ const MESH_ANNOTATIONS = {
palmBase: [0],
};
export class HandPose {
handPipeline: any;
constructor(handPipeline) {
this.handPipeline = handPipeline;
}
static getAnnotations() {
return MESH_ANNOTATIONS;
}
async estimateHands(input, config) {
const predictions = await this.handPipeline.estimateHands(input, config);
if (!predictions) return [];
const hands: Array<{ confidence: number, box: any, boxRaw: any, landmarks: any, annotations: any }> = [];
for (const prediction of predictions) {
const annotations = {};
if (prediction.landmarks) {
for (const key of Object.keys(MESH_ANNOTATIONS)) {
annotations[key] = MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);
}
}
const box = prediction.box ? [
Math.max(0, prediction.box.topLeft[0]),
Math.max(0, prediction.box.topLeft[1]),
Math.min(input.shape[2], prediction.box.bottomRight[0]) - Math.max(0, prediction.box.topLeft[0]),
Math.min(input.shape[1], prediction.box.bottomRight[1]) - Math.max(0, prediction.box.topLeft[1]),
] : [];
const boxRaw = [
(prediction.box.topLeft[0]) / input.shape[2],
(prediction.box.topLeft[1]) / input.shape[1],
(prediction.box.bottomRight[0] - prediction.box.topLeft[0]) / input.shape[2],
(prediction.box.bottomRight[1] - prediction.box.topLeft[1]) / input.shape[1],
];
hands.push({ confidence: Math.round(100 * prediction.confidence) / 100, box, boxRaw, landmarks: prediction.landmarks, annotations });
}
return hands;
}
}
let handDetectorModel;
let handPoseModel;
export async function load(config): Promise<HandPose> {
let handPipeline;
export async function predict(input, config) {
const predictions = await handPipeline.estimateHands(input, config);
if (!predictions) return [];
const hands: Array<{ confidence: number, box: any, boxRaw: any, landmarks: any, annotations: any }> = [];
for (const prediction of predictions) {
const annotations = {};
if (prediction.landmarks) {
for (const key of Object.keys(meshAnnotations)) {
annotations[key] = meshAnnotations[key].map((index) => prediction.landmarks[index]);
}
}
const box = prediction.box ? [
Math.max(0, prediction.box.topLeft[0]),
Math.max(0, prediction.box.topLeft[1]),
Math.min(input.shape[2], prediction.box.bottomRight[0]) - Math.max(0, prediction.box.topLeft[0]),
Math.min(input.shape[1], prediction.box.bottomRight[1]) - Math.max(0, prediction.box.topLeft[1]),
] : [];
const boxRaw = [
(prediction.box.topLeft[0]) / input.shape[2],
(prediction.box.topLeft[1]) / input.shape[1],
(prediction.box.bottomRight[0] - prediction.box.topLeft[0]) / input.shape[2],
(prediction.box.bottomRight[1] - prediction.box.topLeft[1]) / input.shape[1],
];
hands.push({ confidence: Math.round(100 * prediction.confidence) / 100, box, boxRaw, landmarks: prediction.landmarks, annotations });
}
return hands;
}
export async function load(config): Promise<[Object, Object]> {
if (!handDetectorModel || !handPoseModel) {
[handDetectorModel, handPoseModel] = await Promise.all([
config.hand.enabled ? tf.loadGraphModel(join(config.modelBasePath, config.hand.detector.modelPath), { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
@ -71,8 +60,7 @@ export async function load(config): Promise<HandPose> {
if (config.debug) log('cached model:', handDetectorModel.modelUrl);
if (config.debug) log('cached model:', handPoseModel.modelUrl);
}
const handDetector = new handdetector.HandDetector(handDetectorModel, handDetectorModel?.inputs[0].shape[2], anchors.anchors);
const handPipeline = new handpipeline.HandPipeline(handDetector, handPoseModel, handPoseModel?.inputs[0].shape[2]);
const handPose = new HandPose(handPipeline);
return handPose;
const handDetector = new handdetector.HandDetector(handDetectorModel);
handPipeline = new handpipeline.HandPipeline(handDetector, handPoseModel);
return [handDetectorModel, handPoseModel];
}
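With the `HandPose` class gone, callers interact with the module's two exports instead of instantiating anything. A usage sketch; the input tensor and config are assumed to come from the surrounding pipeline:

```ts
// usage sketch for the new module-level API
import * as handpose from './handpose/handpose';

await handpose.load(config);                          // loads and caches both models once
const hands = await handpose.predict(tensor, config); // reuses the cached pipeline
for (const hand of hands) {
  // each result carries a rounded confidence, a clipped box, a normalized boxRaw,
  // raw landmarks, and per-finger annotations keyed by meshAnnotations
  console.log(hand.confidence, hand.box, Object.keys(hand.annotations));
}
```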


@ -82,11 +82,11 @@ export class Human {
};
/** Internal: Currently loaded models */
models: {
face: facemesh.MediaPipeFaceMesh | Model | null,
face: [Model, Model, Model] | null,
posenet: Model | null,
blazepose: Model | null,
efficientpose: Model | null,
handpose: handpose.HandPose | null,
handpose: [Model, Model] | null,
iris: Model | null,
age: Model | null,
gender: Model | null,
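Because the converted modules hand back raw model objects instead of wrapper instances, the registry entries become tuples. A sketch of how they are populated, assuming the `load()` signatures introduced in this commit (the three-model face tuple is inferred from the facemesh conversion, whose diff is suppressed above):

```ts
// sketch: populating the tuple-typed registry
this.models.handpose = await handpose.load(this.config); // [detector, skeleton]
this.models.face = await facemesh.load(this.config);     // assumed [blazeface, facemesh, iris]
```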
@ -431,12 +431,12 @@ export class Human {
// run handpose
this.analyze('Start Hand:');
if (this.config.async) {
handRes = this.config.hand.enabled ? this.models.handpose?.estimateHands(process.tensor, this.config) : [];
handRes = this.config.hand.enabled ? handpose.predict(process.tensor, this.config) : [];
if (this.perf.hand) delete this.perf.hand;
} else {
this.state = 'run:hand';
timeStamp = now();
handRes = this.config.hand.enabled ? await this.models.handpose?.estimateHands(process.tensor, this.config) : [];
handRes = this.config.hand.enabled ? await handpose.predict(process.tensor, this.config) : [];
current = Math.trunc(now() - timeStamp);
if (current > 0) this.perf.hand = current;
}