added experimental movenet-lightning and removed blazepose from default dist

pull/356/head
Vladimir Mandic 2021-05-29 09:20:01 -04:00
parent cbe8e5a7d1
commit 185463e30d
13 changed files with 493 additions and 23 deletions

View File

@ -11,9 +11,9 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
### **HEAD -> main** 2021/05/28 mandic00@live.com
### **origin/main** 2021/05/27 mandic00@live.com
- added experimental face.rotation.gaze
- fix and optimize for mobile platform
- lock typescript to 4.2 due to typedoc incompatibility with 4.3
### **1.9.4** 2021/05/27 mandic00@live.com

View File

@ -218,7 +218,7 @@ Default models in Human library are:
- **Body Analysis**: PoseNet (AtomicBits version)
Note that alternative models are provided and can be enabled via configuration
For example, `PoseNet` model can be switched for `BlazePose` model depending on the use case
For example, `PoseNet` model can be switched for `BlazePose`, `EfficientPose` or `MoveNet` model depending on the use case
For more info, see [**Configuration Details**](https://github.com/vladmandic/human/wiki/Configuration) and [**List of Models**](https://github.com/vladmandic/human/wiki/Models)

View File

@ -13,8 +13,6 @@ N/A
- InsightFace
RetinaFace detector and ArcFace recognition
<https://github.com/deepinsight/insightface>
- Blazepose
Needs detector before running pose to center the image
## In Progress

View File

@ -16,14 +16,15 @@ let human;
const userConfig = {
warmup: 'none',
/*
backend: 'webgl',
// async: false,
// cacheSensitivity: 0,
async: false,
cacheSensitivity: 0,
filter: {
enabled: false,
flip: false,
},
face: { enabled: true,
face: { enabled: false,
detector: { return: true },
mesh: { enabled: true },
iris: { enabled: true },
@ -31,10 +32,12 @@ const userConfig = {
emotion: { enabled: false },
},
hand: { enabled: false },
body: { enabled: false, modelPath: 'posenet.json' },
// body: { enabled: true, modelPath: 'posenet.json' },
body: { enabled: true, modelPath: 'movenet-lightning.json' },
// body: { enabled: true, modelPath: 'blazepose.json' },
object: { enabled: false },
gesture: { enabled: true },
*/
};
const drawOptions = {

Binary file not shown.

File diff suppressed because one or more lines are too long

View File

@ -288,11 +288,11 @@ const config: Config = {
body: {
enabled: true,
modelPath: 'posenet.json', // body model, can be absolute path or relative to modelBasePath
// can be 'posenet' or 'blazepose'
// can be 'posenet', 'blazepose', 'efficientpose', 'movenet'
maxDetected: 1, // maximum number of people detected in the input
// should be set to the minimum number for performance
// only valid for posenet as blazepose only detects single pose
minConfidence: 0.1, // threshold for discarding a prediction
// only valid for posenet as other models detect only a single pose
minConfidence: 0.2, // threshold for discarding a prediction
},
hand: {

View File

@ -40,6 +40,7 @@ export interface DrawOptions {
drawLabels: boolean,
drawBoxes: boolean,
drawPolygons: boolean,
drawGaze: boolean,
fillPolygons: boolean,
useDepth: boolean,
useCurves: boolean,
@ -60,6 +61,7 @@ export const options: DrawOptions = {
drawLabels: <boolean>true,
drawBoxes: <boolean>true,
drawPolygons: <boolean>true,
drawGaze: <boolean>false,
fillPolygons: <boolean>false,
useDepth: <boolean>true,
useCurves: <boolean>false,
@ -238,7 +240,7 @@ export async function face(inCanvas: HTMLCanvasElement, result: Array<Face>, dra
ctx.fill();
}
}
if (f.rotation?.gaze?.strength && f.rotation?.gaze?.angle) {
if (localOptions.drawGaze && f.rotation?.gaze?.strength && f.rotation?.gaze?.angle) {
const leftGaze = [
f.annotations['leftEyeIris'][0][0] + (Math.cos(f.rotation.gaze.angle) * f.rotation.gaze.strength * f.box[2]),
f.annotations['leftEyeIris'][0][1] - (Math.sin(f.rotation.gaze.angle) * f.rotation.gaze.strength * f.box[3]),

View File

@ -99,20 +99,20 @@ export async function predict(image, config): Promise<Body[]> {
}
score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
const x = keypoints.map((a) => a.position.x);
const y = keypoints.map((a) => a.position.x);
const y = keypoints.map((a) => a.position.y);
box = [
Math.min(...x),
Math.min(...y),
Math.max(...x) - Math.min(...x),
Math.max(...y) - Math.min(...x),
Math.max(...y) - Math.min(...y),
];
const xRaw = keypoints.map((a) => a.positionRaw.x);
const yRaw = keypoints.map((a) => a.positionRaw.x);
const yRaw = keypoints.map((a) => a.positionRaw.y);
boxRaw = [
Math.min(...xRaw),
Math.min(...yRaw),
Math.max(...xRaw) - Math.min(...xRaw),
Math.max(...yRaw) - Math.min(...xRaw),
Math.max(...yRaw) - Math.min(...yRaw),
];
resolve([{ id: 0, score, box, boxRaw, keypoints }]);
});

View File

@ -15,6 +15,8 @@ import * as emotion from './emotion/emotion';
import * as posenet from './posenet/posenet';
import * as handpose from './handpose/handpose';
import * as blazepose from './blazepose/blazepose';
import * as efficientpose from './efficientpose/efficientpose';
import * as movenet from './movenet/movenet';
import * as nanodet from './object/nanodet';
import * as centernet from './object/centernet';
import * as gesture from './gesture/gesture';
@ -91,6 +93,7 @@ export class Human {
posenet: Model | null,
blazepose: Model | null,
efficientpose: Model | null,
movenet: Model | null,
handpose: [Model, Model] | null,
iris: Model | null,
age: Model | null,
@ -105,7 +108,7 @@ export class Human {
classes: {
facemesh: typeof facemesh;
emotion: typeof emotion;
body: typeof posenet | typeof blazepose;
body: typeof posenet | typeof blazepose | typeof movenet;
hand: typeof handpose;
nanodet: typeof nanodet;
centernet: typeof centernet;
@ -150,6 +153,7 @@ export class Human {
posenet: null,
blazepose: null,
efficientpose: null,
movenet: null,
handpose: null,
iris: null,
age: null,
@ -262,6 +266,8 @@ export class Human {
this.models.handpose,
this.models.posenet,
this.models.blazepose,
this.models.efficientpose,
this.models.movenet,
this.models.nanodet,
this.models.centernet,
this.models.faceres,
@ -271,6 +277,8 @@ export class Human {
this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
this.models.posenet || (this.config.body.enabled && this.config.body.modelPath.includes('posenet') ? posenet.load(this.config) : null),
this.models.blazepose || (this.config.body.enabled && this.config.body.modelPath.includes('blazepose') ? blazepose.load(this.config) : null),
this.models.efficientpose || (this.config.body.enabled && this.config.body.modelPath.includes('efficientpose') ? efficientpose.load(this.config) : null),
this.models.movenet || (this.config.body.enabled && this.config.body.modelPath.includes('movenet') ? movenet.load(this.config) : null),
this.models.nanodet || (this.config.object.enabled && this.config.object.modelPath.includes('nanodet') ? nanodet.load(this.config) : null),
this.models.centernet || (this.config.object.enabled && this.config.object.modelPath.includes('centernet') ? centernet.load(this.config) : null),
this.models.faceres || ((this.config.face.enabled && this.config.face.description.enabled) ? faceres.load(this.config) : null),
@ -281,6 +289,8 @@ export class Human {
if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config);
if (this.config.body.enabled && !this.models.posenet && this.config.body.modelPath.includes('posenet')) this.models.posenet = await posenet.load(this.config);
if (this.config.body.enabled && !this.models.blazepose && this.config.body.modelPath.includes('blazepose')) this.models.blazepose = await blazepose.load(this.config);
if (this.config.body.enabled && !this.models.efficientpose && this.config.body.modelPath.includes('efficientpose')) this.models.efficientpose = await blazepose.load(this.config);
if (this.config.body.enabled && !this.models.movenet && this.config.body.modelPath.includes('movenet')) this.models.movenet = await movenet.load(this.config);
if (this.config.object.enabled && !this.models.nanodet && this.config.object.modelPath.includes('nanodet')) this.models.nanodet = await nanodet.load(this.config);
if (this.config.object.enabled && !this.models.centernet && this.config.object.modelPath.includes('centernet')) this.models.centernet = await centernet.load(this.config);
if (this.config.face.enabled && this.config.face.description.enabled && !this.models.faceres) this.models.faceres = await faceres.load(this.config);
@ -474,17 +484,21 @@ export class Human {
if (elapsedTime > 0) this.perf.face = elapsedTime;
}
// run body: can be posenet or blazepose
// run body: can be posenet, blazepose, efficientpose, movenet
this.analyze('Start Body:');
if (this.config.async) {
if (this.config.body.modelPath.includes('posenet')) bodyRes = this.config.body.enabled ? posenet.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('blazepose')) bodyRes = this.config.body.enabled ? blazepose.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('efficientpose')) bodyRes = this.config.body.enabled ? efficientpose.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('movenet')) bodyRes = this.config.body.enabled ? movenet.predict(process.tensor, this.config) : [];
if (this.perf.body) delete this.perf.body;
} else {
this.state = 'run:body';
timeStamp = now();
if (this.config.body.modelPath.includes('posenet')) bodyRes = this.config.body.enabled ? await posenet.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('efficientpose')) bodyRes = this.config.body.enabled ? await efficientpose.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('movenet')) bodyRes = this.config.body.enabled ? await movenet.predict(process.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.perf.body = elapsedTime;
}

92
src/movenet/movenet.ts Normal file
View File

@ -0,0 +1,92 @@
/**
 * MoveNet Module
 * Body pose detection using the MoveNet (Lightning) model
 */
import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import { Body } from '../result';
import { GraphModel } from '../tfjs/types';
// lazily-loaded model instance, cached across calls to load()
let model: GraphModel;
type Keypoints = { score: number, part: string, position: { x: number, y: number }, positionRaw: { x: number, y: number } };
// module-level cache of the last detection result, reused while frame-skipping is active
const keypoints: Array<Keypoints> = [];
// cached bounding box enclosing all keypoints: [x, y, width, height] in input-image pixels
let box: [number, number, number, number] = [0, 0, 0, 0];
// same box but with coordinates normalized to 0..1
let boxRaw: [number, number, number, number] = [0, 0, 0, 0];
// cached overall score: highest individual keypoint score from the last detection
let score = 0;
// frames skipped since last real detection; starts at MAX so the first call always runs the model
let skipped = Number.MAX_SAFE_INTEGER;
// keypoint names indexed by model output order (17 keypoints)
const bodyParts = ['nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar', 'leftShoulder', 'rightShoulder', 'leftElbow', 'rightElbow', 'leftWrist', 'rightWrist', 'leftHip', 'rightHip', 'leftKnee', 'rightKnee', 'leftAnkle', 'rightAnkle'];
export async function load(config) {
if (!model) {
// @ts-ignore type mismatch on GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);
return model;
}
/**
 * Runs MoveNet body pose detection on an input image tensor.
 * Returns a cached result while frame-skipping is active, otherwise runs the model,
 * extracts keypoints above config.body.minConfidence and computes enclosing boxes.
 *
 * @param image input image tensor; assumed shape [1, height, width, channels] — TODO confirm against caller
 * @param config runtime configuration; reads config.skipFrame and config.body.{enabled,skipFrames,minConfidence}
 * @returns single-element array describing the detected body
 */
export async function predict(image, config): Promise<Body[]> {
  // reuse cached result while frame-skipping is active and a previous detection exists
  if ((skipped < config.body.skipFrames) && config.skipFrame && Object.keys(keypoints).length > 0) {
    skipped++;
    return [{ id: 0, score, box, boxRaw, keypoints }];
  }
  skipped = 0;
  // resize to the model's expected resolution and cast to int32
  const tensor = tf.tidy(() => {
    if (!model.inputs[0].shape) return null;
    const resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
    const cast = tf.cast(resize, 'int32');
    return cast;
  });
  let resT;
  // guard on tensor: previously a null tensor (missing input shape) crashed on predict()/dispose()
  if (config.body.enabled && tensor) resT = await model.predict(tensor);
  if (tensor) tensor.dispose();
  if (resT) {
    keypoints.length = 0;
    const res = resT.arraySync();
    tf.dispose(resT);
    // model output: per-keypoint [y, x, score] triples — presumed MoveNet layout, matches indexing below
    const kpt = res[0][0];
    for (let id = 0; id < kpt.length; id++) {
      score = kpt[id][2];
      if (score > config.body.minConfidence) {
        keypoints.push({
          score: Math.round(100 * score) / 100,
          part: bodyParts[id],
          positionRaw: { // normalized to 0..1
            x: kpt[id][1],
            y: kpt[id][0],
          },
          position: { // normalized to input image size
            x: Math.round(image.shape[2] * kpt[id][1]),
            y: Math.round(image.shape[1] * kpt[id][0]),
          },
        });
      }
    }
  }
  // overall score is the best individual keypoint score (0 when none detected)
  score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
  if (keypoints.length > 0) {
    const x = keypoints.map((a) => a.position.x);
    const y = keypoints.map((a) => a.position.y);
    box = [
      Math.min(...x),
      Math.min(...y),
      Math.max(...x) - Math.min(...x),
      Math.max(...y) - Math.min(...y),
    ];
    const xRaw = keypoints.map((a) => a.positionRaw.x);
    const yRaw = keypoints.map((a) => a.positionRaw.y);
    boxRaw = [
      Math.min(...xRaw),
      Math.min(...yRaw),
      Math.max(...xRaw) - Math.min(...xRaw),
      Math.max(...yRaw) - Math.min(...yRaw),
    ];
  } else {
    // no keypoints: previously Math.min/max on empty arrays produced Infinity boxes
    box = [0, 0, 0, 0];
    boxRaw = [0, 0, 0, 0];
  }
  return [{ id: 0, score, box, boxRaw, keypoints }];
}

View File

@ -141,8 +141,8 @@ async function test(Human, inputConfig) {
log('info', 'test body variants');
config.body = { modelPath: 'posenet.json', enabled: true };
await testDetect(human, 'assets/human-sample-body.jpg', 'posenet');
config.body = { modelPath: 'blazepose.json', enabled: true };
await testDetect(human, 'assets/human-sample-body.jpg', 'blazepose');
config.body = { modelPath: 'movenet-lightning.json', enabled: true };
await testDetect(human, 'assets/human-sample-body.jpg', 'movenet');
await testDetect(human, null, 'default');
log('info', 'test: first instance');

2
wiki

@ -1 +1 @@
Subproject commit 30aee32aa12b52d46e663488d1d0124bfb73f527
Subproject commit 317a8fc76cd933cc38f59948ffade324fc8f1df2